diff --git a/.github/workflows/node-zxc-build-release-artifact.yaml b/.github/workflows/node-zxc-build-release-artifact.yaml index 3c7aa7817d35..66ee3ec140e8 100644 --- a/.github/workflows/node-zxc-build-release-artifact.yaml +++ b/.github/workflows/node-zxc-build-release-artifact.yaml @@ -81,6 +81,8 @@ jobs: Artifact: name: ${{ inputs.custom-job-label || 'Artifact' }} runs-on: [self-hosted, Linux, large, ephemeral] + outputs: + version: ${{ steps.effective-version.outputs.number }} steps: - name: Install Semantic Version Tools run: | @@ -220,6 +222,13 @@ jobs: EFF_VERSION="$(./gradlew showVersion --quiet | tr -d '[:space:]')" echo "number=${EFF_VERSION}" >>"${GITHUB_OUTPUT}" + - name: Cache Build Artifacts + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # pin@v3 + if: ${{ inputs.dry-run-enabled != true && !cancelled() && !failure() }} + with: + path: ${{ env.HOME }}/artifact-build + key: node-build-artifacts-${{ steps.effective-version.outputs.number }}-${{ github.sha }} + - name: Stage Artifact Build Folder id: artifact-staging run: | @@ -328,3 +337,77 @@ jobs: echo "::error title=Jenkins Trigger Failure::Failed to trigger the 'build-preview-testnet' job via the Jenkins 'preview' pipeline!" exit 1 fi + + local-node-images: + name: Local Node Images + runs-on: [ self-hosted, Linux, large, ephemeral ] + needs: + - Artifact + if: ${{ inputs.dry-run-enabled != true && inputs.version-policy == 'specified' && !cancelled() && !failure() }} + steps: + - name: Checkout Code + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # pin@v3 + + - name: Authenticate to Google Cloud + id: google-auth + uses: google-github-actions/auth@ef5d53e30bbcd8d0836f4288f5e50ff3e086997d # pin@v1 + with: + token_format: 'access_token' + workload_identity_provider: "projects/235822363393/locations/global/workloadIdentityPools/hedera-builds-pool/providers/hedera-builds-gh-actions" + service_account: "swirlds-automation@hedera-registry.iam.gserviceaccount.com" + + - name: Setup QEmu Support + uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # pin@v2 + + - name: Setup Docker Buildx Support + uses: docker/setup-buildx-action@16c0bc4a6e6ada2cfd8afd41d22d95379cf7c32a # pin@v2 + + - name: Docker Login + uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # pin@v2 + with: + registry: gcr.io + username: oauth2accesstoken + password: ${{ steps.google-auth.outputs.access_token }} + + - name: Cache Build Artifacts + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # pin@v3 + with: + fail-on-cache-miss: true + path: ${{ env.HOME }}/artifact-build + key: node-build-artifacts-${{ needs.Artifact.outputs.version }}-${{ github.sha }} + + - name: Stage SDK Artifacts + run: | + mkdir -p hedera-node/infrastructure/docker/containers/local-node/main-network-node/sdk + cp -rvf ${{ env.HOME }}/artifact-build/* hedera-node/infrastructure/docker/containers/local-node/main-network-node/sdk/ + + - name: Build Haveged Image + uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # pin@v4 + with: + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + platforms: linux/amd64,linux/arm64 + context: hedera-node/infrastructure/docker/containers/local-node/network-node-haveged + tags: gcr.io/hedera-registry/network-node-haveged:${{ needs.Artifact.outputs.version }} + + - name: Build Base Image + uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # pin@v4 + with: + cache-from: type=gha + cache-to: type=gha,mode=max + push: true 
+ platforms: linux/amd64,linux/arm64 + context: hedera-node/infrastructure/docker/containers/local-node/network-node-base + tags: gcr.io/hedera-registry/network-node-base:${{ needs.Artifact.outputs.version }} + + - name: Build Network Node Image + uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # pin@v4 + with: + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + platforms: linux/amd64,linux/arm64 + build-args: IMAGE_TAG=${{ needs.Artifact.outputs.version }} + context: hedera-node/infrastructure/docker/containers/local-node/main-network-node + tags: gcr.io/hedera-registry/main-network-node:${{ needs.Artifact.outputs.version }} diff --git a/hedera-node/configuration/compose/settings.txt b/hedera-node/configuration/compose/settings.txt index 64cdd38b4f0a..0e169577200c 100644 --- a/hedera-node/configuration/compose/settings.txt +++ b/hedera-node/configuration/compose/settings.txt @@ -2,11 +2,12 @@ # Gossip / Networking # ############################# -chatter.useChatter, false -doUpnp, false -maxOutgoingSyncs, 1 # differs from mainnet numConnections, 1000 -useLoopbackIp, false +socket.doUpnp, false +socket.useLoopbackIp, false +sync.maxOutgoingSyncs, 1 # differs from mainnet +sync.syncAsProtocolEnabled, true +sync.syncProtocolPermitCount, 2 # differs from mainnet ############################# # State # diff --git a/hedera-node/configuration/dev/settings.txt b/hedera-node/configuration/dev/settings.txt index a5cc4947e983..dba3a86dc6c0 100644 --- a/hedera-node/configuration/dev/settings.txt +++ b/hedera-node/configuration/dev/settings.txt @@ -2,13 +2,12 @@ # Gossip / Networking # ############################# -chatter.useChatter, false -doUpnp, false -maxOutgoingSyncs, 1 # differs from mainnet numConnections, 1000 +socket.doUpnp, false +socket.useLoopbackIp, false +sync.maxOutgoingSyncs, 1 # differs from mainnet sync.syncAsProtocolEnabled, true -sync.syncProtocolPermitCount, 17 -useLoopbackIp, false +sync.syncProtocolPermitCount, 2 # differs from mainnet ############################# # State # @@ -45,4 +44,4 @@ reconnect.asyncStreamTimeoutMilliseconds, 60000 metrics.csvFileName, MainNetStats metrics.csvOutputFolder, data/stats showInternalStats, true -prometheus.endpointEnabled, false # differs from mainnet +prometheus.endpointEnabled, false # differs from mainnet diff --git a/hedera-node/configuration/mainnet/settings.txt b/hedera-node/configuration/mainnet/settings.txt index cfa82a39b283..a7590b7dea6c 100644 --- a/hedera-node/configuration/mainnet/settings.txt +++ b/hedera-node/configuration/mainnet/settings.txt @@ -2,11 +2,12 @@ # Gossip / Networking # ############################# -chatter.useChatter, false -doUpnp, false -maxOutgoingSyncs, 8 numConnections, 1000 -useLoopbackIp, false +socket.doUpnp, false +socket.useLoopbackIp, false +sync.maxOutgoingSyncs, 8 +sync.syncAsProtocolEnabled, true +sync.syncProtocolPermitCount, 17 ############################# # State # diff --git a/hedera-node/configuration/preprod/settings.txt b/hedera-node/configuration/preprod/settings.txt index 467cfe4205f3..a031d79df1ba 100644 --- a/hedera-node/configuration/preprod/settings.txt +++ b/hedera-node/configuration/preprod/settings.txt @@ -2,11 +2,12 @@ # Gossip / Networking # ############################# -chatter.useChatter, false -doUpnp, false -maxOutgoingSyncs, 4 # differs from mainnet numConnections, 1000 -useLoopbackIp, false +socket.doUpnp, false +socket.useLoopbackIp, false +sync.maxOutgoingSyncs, 4 # differs from mainnet +sync.syncAsProtocolEnabled, true 
+sync.syncProtocolPermitCount, 4 # differs from mainnet ############################# # State # diff --git a/hedera-node/configuration/previewnet/settings.txt b/hedera-node/configuration/previewnet/settings.txt index 6c44916d43e2..a031d79df1ba 100644 --- a/hedera-node/configuration/previewnet/settings.txt +++ b/hedera-node/configuration/previewnet/settings.txt @@ -2,13 +2,12 @@ # Gossip / Networking # ############################# -chatter.useChatter, false -doUpnp, false -maxOutgoingSyncs, 4 # differs from mainnet numConnections, 1000 +socket.doUpnp, false +socket.useLoopbackIp, false +sync.maxOutgoingSyncs, 4 # differs from mainnet sync.syncAsProtocolEnabled, true -sync.syncProtocolPermitCount, 17 -useLoopbackIp, false +sync.syncProtocolPermitCount, 4 # differs from mainnet ############################# # State # diff --git a/hedera-node/configuration/testnet/settings.txt b/hedera-node/configuration/testnet/settings.txt index 467cfe4205f3..a031d79df1ba 100644 --- a/hedera-node/configuration/testnet/settings.txt +++ b/hedera-node/configuration/testnet/settings.txt @@ -2,11 +2,12 @@ # Gossip / Networking # ############################# -chatter.useChatter, false -doUpnp, false -maxOutgoingSyncs, 4 # differs from mainnet numConnections, 1000 -useLoopbackIp, false +socket.doUpnp, false +socket.useLoopbackIp, false +sync.maxOutgoingSyncs, 4 # differs from mainnet +sync.syncAsProtocolEnabled, true +sync.syncProtocolPermitCount, 4 # differs from mainnet ############################# # State # diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/HapiUtils.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/HapiUtils.java index 50141c4d1f7e..9b1c05a4d751 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/HapiUtils.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/HapiUtils.java @@ -248,4 +248,45 @@ public static String toString(@NonNull final SemanticVersion version) { } return baseVersion.toString(); } + + /** + * Parses an account from a string of the form shardNum.realmNum.accountNum + * @param string The input string + * @return The corresponding {@link AccountID} + * @throws IllegalArgumentException if the string is not a dot-separated triplet of numbers + */ + public static AccountID parseAccount(@NonNull final String string) { + try { + final var parts = string.split("\\."); + return AccountID.newBuilder() + .shardNum(Long.parseLong(parts[0])) + .realmNum(Long.parseLong(parts[1])) + .accountNum(Long.parseLong(parts[2])) + .build(); + } catch (final NumberFormatException | ArrayIndexOutOfBoundsException e) { + throw new IllegalArgumentException(String.format("'%s' is not a dot-separated triplet", string)); + } + } + + /** + * Utility to convert an {@link AccountID} into a nicely formatted String. 
+ * @param id The id to convert + * @return The string representation + */ + public static String toString(@NonNull final AccountID id) { + var builder = new StringBuilder() + .append(id.shardNum()) + .append(".") + .append(id.realmNum()) + .append("."); + + if (id.hasAccountNum()) { + builder.append(id.accountNum()); + } else if (id.hasAlias()) { + builder.append(id.alias()); + } else { + builder.append("-"); + } + return builder.toString(); + } } diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NetworkInfo.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NetworkInfo.java index 20bfbf41b749..9e4a35f44930 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NetworkInfo.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NetworkInfo.java @@ -18,6 +18,8 @@ import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.List; /** * Provides information about the network. @@ -31,4 +33,13 @@ public interface NetworkInfo { */ @NonNull Bytes ledgerId(); + + @NonNull + NodeInfo selfNodeInfo(); + + @NonNull + List addressBook(); + + @Nullable + NodeInfo nodeInfo(long nodeId); } diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java index 4b604cbe7a34..292801487bbf 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java @@ -18,7 +18,6 @@ import com.hedera.hapi.node.base.AccountID; import com.swirlds.common.system.address.AddressBook; -import edu.umd.cs.findbugs.annotations.NonNull; /** * Summarizes useful information about the nodes in the {@link AddressBook} from the Platform. In @@ -32,30 +31,13 @@ public interface NodeInfo { * * @return whether this node has zero stake. */ - boolean isSelfZeroStake(); + boolean zeroStake(); /** - * Returns the account parsed from the address book memo corresponding to the given node id. + * Returns the account ID corresponding with this node. * - * @param nodeId the id of interest - * @return the account parsed from the address book memo corresponding to the given node id. - * @throws IllegalArgumentException if the book did not contain the id, or was missing an - * account for the id + * @return the account ID of the node. + * @throws IllegalStateException if the book did not contain the id, or was missing an account for the id */ - @NonNull - AccountID accountOf(long nodeId); - - /** - * Returns if the given node id is valid and the address book contains the id. - * @param nodeId the id of interest - * @return true if the given node id is valid. False otherwise. 
- */ - default boolean isValidId(long nodeId) { - try { - accountOf(nodeId); - return true; - } catch (IllegalArgumentException e) { - return false; - } - } + AccountID accountId(); } diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/signatures/SignatureVerification.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/signatures/SignatureVerification.java index 59f134718112..195068ea2316 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/signatures/SignatureVerification.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/signatures/SignatureVerification.java @@ -16,9 +16,12 @@ package com.hedera.node.app.spi.signatures; +import static java.util.Objects.requireNonNull; + import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.state.token.Account; import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; /** @@ -68,4 +71,52 @@ default Bytes evmAlias() { default boolean failed() { return !passed(); } + + /** + * Convenience method to create a SignatureVerification that failed + * + * @param key The key for which verification failed + */ + @NonNull + static SignatureVerification failedVerification(@NonNull final Key key) { + requireNonNull(key, "Key must not be null"); + return new SignatureVerification() { + @NonNull + @Override + public Key key() { + return key; + } + + @Override + public boolean passed() { + return false; + } + }; + } + + /** + * Convenience method to create a SignatureVerification for a hollow account that failed + * + * @param evmAlias The alias for which verification failed + */ + static SignatureVerification failedVerification(@NonNull final Bytes evmAlias) { + return new SignatureVerification() { + @Nullable + @Override + public Key key() { + return null; + } + + @NonNull + @Override + public Bytes evmAlias() { + return evmAlias; + } + + @Override + public boolean passed() { + return false; + } + }; + } } diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/AttributeValidator.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/AttributeValidator.java index 72f56d02ff34..cc6af1687961 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/AttributeValidator.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/AttributeValidator.java @@ -20,6 +20,7 @@ import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.TransactionHandler; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; /** * A type that any {@link TransactionHandler} can use to validate entity @@ -35,7 +36,7 @@ public interface AttributeValidator { * @param key the key to validate * @throws HandleException if the key is invalid or more than {@value MAX_NESTED_KEY_LEVELS} */ - void validateKey(Key key); + void validateKey(@NonNull Key key); /** * Validates the given memo. @@ -43,7 +44,7 @@ public interface AttributeValidator { * @param memo the memo to validate * @throws HandleException if the key is invalid */ - void validateMemo(String memo); + void validateMemo(@Nullable String memo); /** * Validates the given expiry. 
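Usage sketch for the parseAccount and toString helpers added to HapiUtils earlier in this patch — illustrative only and not part of the patch itself; the HapiUtilsExample class name is hypothetical, and the example assumes only the PBJ-generated AccountID builder and accessors already shown in the diff.

// Illustrative only; not part of this patch.
import com.hedera.hapi.node.base.AccountID;
import com.hedera.node.app.spi.HapiUtils;

public final class HapiUtilsExample {
    public static void main(final String[] args) {
        // "0.0.3" parses into shard 0, realm 0, account number 3
        final AccountID id = HapiUtils.parseAccount("0.0.3");

        // toString(AccountID) appends the account number when present, the alias
        // otherwise, or "-" when neither field is set
        System.out.println(HapiUtils.toString(id)); // prints 0.0.3

        // A string that is not a dot-separated triplet of numbers is rejected
        try {
            HapiUtils.parseAccount("not-an-account");
        } catch (final IllegalArgumentException e) {
            System.out.println(e.getMessage()); // 'not-an-account' is not a dot-separated triplet
        }
    }
}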
diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/ExpiryValidator.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/ExpiryValidator.java index 3f4ae4e9e1f7..cf40473782d9 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/ExpiryValidator.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/validation/ExpiryValidator.java @@ -59,9 +59,7 @@ public interface ExpiryValidator { */ @NonNull ResponseCodeEnum expirationStatus( - @NonNull final EntityType entityType, - final boolean isMarkedExpired, - final long balanceAvailableForSelfRenewal); + @NonNull EntityType entityType, boolean isMarkedExpired, long balanceAvailableForSelfRenewal); /** * Gets the expiration status of an account and returns if the account is detached @@ -71,9 +69,7 @@ ResponseCodeEnum expirationStatus( * @return true if the account is detached, otherwise false */ default boolean isDetached( - @NonNull final EntityType entityType, - final boolean isMarkedExpired, - final long balanceAvailableForSelfRenewal) { + @NonNull EntityType entityType, boolean isMarkedExpired, long balanceAvailableForSelfRenewal) { return expirationStatus(entityType, isMarkedExpired, balanceAvailableForSelfRenewal) != ResponseCodeEnum.OK; } } diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java index ef8787fae670..b1335d1a49af 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java @@ -16,6 +16,7 @@ package com.hedera.node.app.spi.workflows; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.spi.signatures.SignatureVerification; @@ -73,6 +74,14 @@ enum TransactionCategory { @NonNull TransactionBody body(); + /** + * Gets the payer {@link AccountID}. + * + * @return the {@link AccountID} of the payer in this context + */ + @NonNull + AccountID payer(); + /** * Returns the current {@link Configuration} for the node. * @@ -81,6 +90,14 @@ enum TransactionCategory { @NonNull Configuration configuration(); + /** + * Getter for the payer key + * + * @return the payer key + */ + @Nullable + Key payerKey(); + /** * Returns the next entity number, for use by handlers that create entities. * @@ -109,25 +126,69 @@ enum TransactionCategory { @NonNull ExpiryValidator expiryValidator(); + /** + * Returns all (required and optional) keys of a nested transaction. + * + * @param nestedTxn the {@link TransactionBody} which keys are needed + * @param payerForNested the payer for the nested transaction + * @return the set of keys + * @throws PreCheckException If there is a problem with the nested transaction + */ + @NonNull + default TransactionKeys allKeysForTransaction(@NonNull TransactionBody nestedTxn, @NonNull AccountID payerForNested) + throws PreCheckException { + throw new UnsupportedOperationException("Not implemented"); + } + /** * Gets the {@link SignatureVerification} for the given key. If this key was not provided during pre-handle, then * there will be no corresponding {@link SignatureVerification}. 
If the key was provided during pre-handle, then the * corresponding {@link SignatureVerification} will be returned with the result of that verification operation. * + *
<p>
The signatures of required keys are guaranteed to be verified. Optional signatures may still be in the + * process of being verified (and therefore may time out). The timeout can be configured via the configuration + * {@code hedera.workflow.verificationTimeoutMS} + * * @param key the key to get the verification for - * @return the verification for the given key, or {@code null} if no such key was provided during pre-handle + * @return the verification for the given key * @throws NullPointerException if {@code key} is {@code null} */ - @Nullable + @NonNull SignatureVerification verificationFor(@NonNull Key key); + /** + * Gets the {@link SignatureVerification} for the given key. If this key was not provided during pre-handle, then + * there will be no corresponding {@link SignatureVerification}. If the key was provided during pre-handle, then the + * corresponding {@link SignatureVerification} will be returned with the result of that verification operation. + * Additionally, the VerificationAssistant provided may modify the result for "primitive", "Contract ID", or + * "Delegatable Contract ID" keys, and will be called to observe and reply for each such key as it is processed. + * + *
<p>
The signatures of required keys are guaranteed to be verified. Optional signatures may still be in the + * process of being verified (and therefore may time out). The timeout can be configured via the configuration + * {@code hedera.workflow.verificationTimeoutMS} + * + * @param key the key to get the verification for + * @param callback a VerificationAssistant callback function that will observe each "primitive", "Contract ID", or + * "Delegatable Contract ID" key and return a boolean indicating if the given key should be considered valid. + * @return the verification for the given key + */ + @NonNull + default SignatureVerification verificationFor(@NonNull Key key, @NonNull VerificationAssistant callback) { + throw new UnsupportedOperationException("Not implemented"); + } + /** * Gets the {@link SignatureVerification} for the given hollow account. * + *
<p>
The signatures of required accounts are guaranteed to be verified. Optional accounts may still be in the + * process of being verified (and therefore may time out). The timeout can be configured via the configuration + * {@code hedera.workflow.verificationTimeoutMS} + * * @param evmAlias The evm alias to lookup verification for. * @return the verification for the given hollow account. + * @throws NullPointerException if {@code evmAlias} is {@code null} */ - @Nullable + @NonNull SignatureVerification verificationFor(@NonNull final Bytes evmAlias); /** diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/PreHandleContext.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/PreHandleContext.java index 2536f48bf350..500cf0f646f5 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/PreHandleContext.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/PreHandleContext.java @@ -46,7 +46,7 @@ * key was added when the context was created). Some basic validation is performed (the key cannot be null or empty). */ @SuppressWarnings("UnusedReturnValue") -public interface PreHandleContext { +public interface PreHandleContext extends TransactionKeys { /** * Gets the {@link TransactionBody} @@ -72,46 +72,6 @@ public interface PreHandleContext { @NonNull Configuration configuration(); - /** - * Returns an immutable copy of the set of required non-payer keys. - * - * @return the {@link Set} with the required non-payer keys - */ - @NonNull - Set requiredNonPayerKeys(); - - /** - * Gets an immutable copy of the set of required hollow accounts that need signatures. - * - * @return the {@link Set} of hollow accounts required - */ - @NonNull - Set requiredHollowAccounts(); - - /** - * Returns an immutable copy of the set of optional non-payer keys. - * - * @return the {@link Set} with the optional non-payer keys. This set may be empty. - */ - @NonNull - Set optionalNonPayerKeys(); - - /** - * Gets an immutable copy of the set of optional hollow accounts that may need signatures. - * - * @return the {@link Set} of hollow accounts possibly required - */ - @NonNull - Set optionalHollowAccounts(); - - /** - * Getter for the payer key - * - * @return the payer key - */ - @Nullable - Key payerKey(); - /** * Create a new store given the store's interface. This gives read-only access to the store. * @@ -260,6 +220,20 @@ PreHandleContext requireKeyIfReceiverSigRequired( @NonNull PreHandleContext requireSignatureForHollowAccount(@NonNull final Account hollowAccount); + /** + * Returns all (required and optional) keys of a nested transaction. + * + * @param nestedTxn the {@link TransactionBody} which keys are needed + * @param payerForNested the payer for the nested transaction + * @return the set of keys + * @throws PreCheckException If there is a problem with the nested transaction + */ + @NonNull + default TransactionKeys allKeysForTransaction(@NonNull TransactionBody nestedTxn, @NonNull AccountID payerForNested) + throws PreCheckException { + throw new UnsupportedOperationException("Not implemented"); + } + /** * Creates a new {@link PreHandleContext} for a nested transaction. The nested transaction will be set on * this context as the "inner context". There can only be one such at a time. 
The inner context is returned diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/QueryContext.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/QueryContext.java index 7439edf3c0fc..b7595efc09e5 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/QueryContext.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/QueryContext.java @@ -17,6 +17,7 @@ package com.hedera.node.app.spi.workflows; import com.hedera.hapi.node.transaction.Query; +import com.hedera.node.app.spi.records.RecordCache; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; @@ -52,4 +53,8 @@ public interface QueryContext { */ @NonNull Configuration configuration(); + + /** Gets the {@link RecordCache}. */ + @NonNull + RecordCache recordCache(); } diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/TransactionKeys.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/TransactionKeys.java new file mode 100644 index 000000000000..e866b31cedae --- /dev/null +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/TransactionKeys.java @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.spi.workflows; + +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.state.token.Account; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Set; + +/** + * Contains all keys and hollow accounts (required and optional) of a transaction. + */ +public interface TransactionKeys { + + /** + * Getter for the payer key + * + * @return the payer key + */ + @Nullable + Key payerKey(); + + /** + * Returns an immutable copy of the set of required non-payer keys. + * + * @return the {@link Set} with the required non-payer keys + */ + @NonNull + Set requiredNonPayerKeys(); + + /** + * Gets an immutable copy of the set of required hollow accounts that need signatures. + * + * @return the {@link Set} of hollow accounts required + */ + @NonNull + Set requiredHollowAccounts(); + + /** + * Returns an immutable copy of the set of optional non-payer keys. + * + * @return the {@link Set} with the optional non-payer keys. This set may be empty. + */ + @NonNull + Set optionalNonPayerKeys(); + + /** + * Gets an immutable copy of the set of optional hollow accounts that may need signatures. 
+ * + * @return the {@link Set} of hollow accounts possibly required + */ + @NonNull + Set optionalHollowAccounts(); +} diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/VerificationAssistant.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/VerificationAssistant.java new file mode 100644 index 000000000000..e6c30aaa3edb --- /dev/null +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/VerificationAssistant.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.spi.workflows; + +import com.hedera.hapi.node.base.Key; +import com.hedera.node.app.spi.signatures.SignatureVerification; +import java.util.function.BiPredicate; + +/** + * A BiPredicate specialized to assisting and observing the Signature Verification process. Implementations of this + * interface are expected to, in the test method, return true if and only if the Key passed should be considered valid. + * This may be based on both the {@link SignatureVerification} provided, as well as other information (such as a set of + * previously valid keys in state). The "test" method is also an observer of all keys and associated verifications, and + * may choose to store or further inspect each such key and SignatureVerification. + *
<p>
+ * The "test" method will only be called for keys of the "primitive", "Contract ID", and "Delegatable Contract ID" + * types. + */ +public interface VerificationAssistant extends BiPredicate {} diff --git a/hedera-node/hedera-app-spi/src/main/java/module-info.java b/hedera-node/hedera-app-spi/src/main/java/module-info.java index 0248834a2d25..10bec2c659fa 100644 --- a/hedera-node/hedera-app-spi/src/main/java/module-info.java +++ b/hedera-node/hedera-app-spi/src/main/java/module-info.java @@ -6,6 +6,7 @@ requires static com.github.spotbugs.annotations; exports com.hedera.node.app.spi; + exports com.hedera.node.app.spi.info; exports com.hedera.node.app.spi.state; exports com.hedera.node.app.spi.key; exports com.hedera.node.app.spi.numbers; @@ -13,6 +14,5 @@ exports com.hedera.node.app.spi.records; exports com.hedera.node.app.spi.signatures; exports com.hedera.node.app.spi.validation; - exports com.hedera.node.app.spi.info; exports com.hedera.node.app.spi.meta.bni; } diff --git a/hedera-node/hedera-app/build.gradle.kts b/hedera-node/hedera-app/build.gradle.kts index d2d7e1242129..446f6dbfcce7 100644 --- a/hedera-node/hedera-app/build.gradle.kts +++ b/hedera-node/hedera-app/build.gradle.kts @@ -47,6 +47,7 @@ dependencies { itestImplementation(project(":config")) itestImplementation(project(":hapi")) itestImplementation(testFixtures(project(":app-spi"))) + itestImplementation(testFixtures(project(":config"))) itestImplementation(gav("com.github.spotbugs.annotations")) itestImplementation(gav("com.hedera.pbj.runtime")) itestImplementation(gav("com.swirlds.common")) @@ -125,6 +126,14 @@ tasks.register("run") { mainClass.set("com.swirlds.platform.Browser") } +tasks.register("modrun") { + group = "application" + dependsOn(tasks.assemble) + workingDir = project(":hedera-node").projectDir + jvmArgs = listOf("-cp", "data/lib/*", "-Dhedera.workflows.enabled=true") + mainClass.set("com.swirlds.platform.Browser") +} + val cleanRun = tasks.register("cleanRun") { val prj = project(":hedera-node") diff --git a/hedera-node/hedera-app/src/itest/java/signatures/SignatureVerificationTest.java b/hedera-node/hedera-app/src/itest/java/signatures/SignatureVerificationTest.java index 30a924a4a950..aa8bc3c181ce 100644 --- a/hedera-node/hedera-app/src/itest/java/signatures/SignatureVerificationTest.java +++ b/hedera-node/hedera-app/src/itest/java/signatures/SignatureVerificationTest.java @@ -16,23 +16,24 @@ package signatures; -import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; -import static com.hedera.node.app.workflows.prehandle.PreHandleResult.Status.SO_FAR_SO_GOOD; import static java.util.Objects.requireNonNull; import static org.assertj.core.api.Assertions.assertThat; -import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.KeyList; import com.hedera.hapi.node.base.SignaturePair; import com.hedera.hapi.node.base.ThresholdKey; +import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.signature.ExpandedSignaturePair; import com.hedera.node.app.signature.impl.SignatureExpanderImpl; import com.hedera.node.app.signature.impl.SignatureVerifierImpl; import com.hedera.node.app.spi.fixtures.Scenarios; import com.hedera.node.app.spi.fixtures.TestKeyInfo; import com.hedera.node.app.spi.signatures.SignatureVerification; -import com.hedera.node.app.workflows.prehandle.PreHandleResult; +import com.hedera.node.app.workflows.handle.HandleContextVerifier; +import com.hedera.node.config.VersionedConfiguration; +import 
com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import java.math.BigInteger; @@ -45,7 +46,6 @@ import java.util.HashSet; import java.util.LinkedList; import java.util.List; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.bouncycastle.asn1.sec.SECNamedCurves; @@ -70,6 +70,8 @@ class SignatureVerificationTest implements Scenarios { private static final long DEFAULT_CONFIG_VERSION = 1L; + private static final VersionedConfiguration CONFIGURATION = + new VersionedConfigImpl(HederaTestConfigBuilder.createConfig(), DEFAULT_CONFIG_VERSION); @Test @DisplayName("Verify Hollow Account") @@ -89,18 +91,10 @@ void verifyHollowAccount() { final var verificationResults = verifier.verify(testCase.signedBytes, expanded); // Finally, assert that the verification results are as expected - final var result = new PreHandleResult( - AccountID.DEFAULT, - testCase.keyToVerify, - SO_FAR_SO_GOOD, - OK, - null, - verificationResults, - null, - DEFAULT_CONFIG_VERSION); - assertThat(result.verificationFor(ERIN.account().alias())) + final var hederaConfig = CONFIGURATION.getConfigData(HederaConfig.class); + final var handleContextVerifier = new HandleContextVerifier(hederaConfig, verificationResults); + assertThat(handleContextVerifier.verificationFor(ERIN.account().alias())) .isNotNull() - .succeedsWithin(1, TimeUnit.MINUTES) .extracting(SignatureVerification::passed) .isEqualTo(true); } @@ -125,18 +119,10 @@ void verifySignatures( final var verificationResults = verifier.verify(signedBytes, expanded); // Finally, assert that the verification results are as expected - final var result = new PreHandleResult( - AccountID.DEFAULT, - keyToVerify, - SO_FAR_SO_GOOD, - OK, - null, - verificationResults, - null, - DEFAULT_CONFIG_VERSION); - assertThat(result.verificationFor(keyToVerify)) + final var hederaConfig = CONFIGURATION.getConfigData(HederaConfig.class); + final var handleContextVerifier = new HandleContextVerifier(hederaConfig, verificationResults); + assertThat(handleContextVerifier.verificationFor(keyToVerify)) .isNotNull() - .succeedsWithin(1, TimeUnit.MINUTES) .extracting(SignatureVerification::passed) .isEqualTo(shouldPass); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java index a13b2867c648..c3c588604118 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java @@ -16,59 +16,47 @@ package com.hedera.node.app; -import static com.hedera.node.app.service.mono.ServicesState.EMPTY_HASH; -import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_FIRST_USER_ENTITY; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.LEDGER_TOTAL_TINY_BAR_FLOAT; -import static com.hedera.node.app.service.mono.context.properties.SemanticVersions.SEMANTIC_VERSIONS; +import static com.hedera.node.app.spi.HapiUtils.parseAccount; +import static com.swirlds.common.system.InitTrigger.EVENT_STREAM_RECOVERY; +import static com.swirlds.common.system.InitTrigger.GENESIS; +import static com.swirlds.common.system.InitTrigger.RESTART; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Objects.requireNonNull; -import 
com.hedera.hapi.node.base.AccountID; import com.hedera.node.app.config.ConfigProviderImpl; +import com.hedera.node.app.info.CurrentPlatformStatusImpl; import com.hedera.node.app.service.consensus.impl.ConsensusServiceImpl; import com.hedera.node.app.service.contract.impl.ContractServiceImpl; import com.hedera.node.app.service.file.impl.FileServiceImpl; -import com.hedera.node.app.service.mono.context.StateChildrenProvider; import com.hedera.node.app.service.mono.context.properties.BootstrapProperties; -import com.hedera.node.app.service.mono.context.properties.SerializableSemVers; -import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.service.mono.state.merkle.MerkleNetworkContext; -import com.hedera.node.app.service.mono.state.merkle.MerkleScheduledTransactionsState; -import com.hedera.node.app.service.mono.state.merkle.MerkleSpecialFiles; import com.hedera.node.app.service.mono.state.merkle.MerkleStakingInfo; -import com.hedera.node.app.service.mono.state.migration.StateVersions; -import com.hedera.node.app.service.mono.state.submerkle.ExchangeRates; -import com.hedera.node.app.service.mono.state.submerkle.SequenceNumber; -import com.hedera.node.app.service.mono.stream.RecordsRunningHashLeaf; import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.mono.utils.NamedDigestFactory; -import com.hedera.node.app.service.networkadmin.FreezeService; -import com.hedera.node.app.service.networkadmin.NetworkService; +import com.hedera.node.app.service.networkadmin.ReadableRunningHashLeafStore; import com.hedera.node.app.service.networkadmin.impl.FreezeServiceImpl; import com.hedera.node.app.service.networkadmin.impl.NetworkServiceImpl; -import com.hedera.node.app.service.schedule.ScheduleService; import com.hedera.node.app.service.schedule.impl.ScheduleServiceImpl; import com.hedera.node.app.service.token.impl.TokenServiceImpl; import com.hedera.node.app.service.util.impl.UtilServiceImpl; import com.hedera.node.app.services.ServicesRegistry; import com.hedera.node.app.services.ServicesRegistryImpl; +import com.hedera.node.app.spi.HapiUtils; import com.hedera.node.app.spi.Service; -import com.hedera.node.app.spi.state.WritableFreezeStore; import com.hedera.node.app.spi.state.WritableKVState; import com.hedera.node.app.state.HederaState; import com.hedera.node.app.state.merkle.MerkleHederaState; -import com.hedera.node.app.state.merkle.MerkleHederaState.MerkleWritableStates; import com.hedera.node.app.state.merkle.MerkleSchemaRegistry; import com.hedera.node.app.state.recordcache.RecordCacheService; +import com.hedera.node.app.version.HederaSoftwareVersion; +import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.data.VersionConfig; import com.swirlds.common.constructable.ClassConstructorPair; import com.swirlds.common.constructable.ConstructableRegistry; import com.swirlds.common.constructable.ConstructableRegistryException; import com.swirlds.common.crypto.CryptographyHolder; -import com.swirlds.common.crypto.RunningHash; import com.swirlds.common.notification.listeners.PlatformStatusChangeListener; -import com.swirlds.common.notification.listeners.ReconnectCompleteListener; -import com.swirlds.common.notification.listeners.StateWriteToDiskCompleteListener; -import com.swirlds.common.system.DualState; import com.swirlds.common.system.InitTrigger; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Platform; @@ -79,13 
+67,13 @@ import com.swirlds.common.system.SwirldState; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.events.Event; -import com.swirlds.common.system.state.notifications.IssListener; -import com.swirlds.common.system.state.notifications.NewSignedStateListener; -import com.swirlds.platform.gui.SwirldsGui; +import com.swirlds.common.system.transaction.Transaction; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.charset.Charset; import java.security.NoSuchAlgorithmException; +import java.time.InstantSource; +import java.util.ArrayList; import java.util.Locale; import java.util.Set; import org.apache.logging.log4j.LogManager; @@ -128,6 +116,8 @@ public final class Hedera implements SwirldMain { } private static final Logger logger = LogManager.getLogger(Hedera.class); + private static final int STATE_VERSION_NEWER_THAN_SOFTWARE_VERSION_EXIT_CODE = 10; + private static final int VERSION_NOT_IN_SAVED_STATE_EXIT_CODE = 11; // This should come from configuration, NOT be hardcoded. public static final int MAX_SIGNED_TXN_SIZE = 6144; @@ -146,16 +136,11 @@ private record ServiceRegistration( /** The registry of all known services */ private final ServicesRegistry servicesRegistry; /** The current version of THIS software */ - private final SerializableSemVers version; - /** The BootstrapProperties for this node */ - private final BootstrapProperties bootstrapProps; + private final HederaSoftwareVersion version; /** The Hashgraph Platform. This is set during state initialization. */ private Platform platform; /** The configuration for this node */ private ConfigProviderImpl configProvider; - /** Used to interface with the mono-service. */ - private StateChildrenProvider stateChildren; - /** * Dependencies managed by Dagger. Set during state initialization. The mono-service requires this object, but none * of the rest of the system (and particularly the modular implementation) uses it directly. Rather, it is created @@ -173,20 +158,28 @@ private record ServiceRegistration( * Create a new Hedera instance. * * @param constructableRegistry The registry to use during the deserialization process - * @param bootstrapProps The bootstrap properties */ - Hedera( - @NonNull final ConstructableRegistry constructableRegistry, - @NonNull final BootstrapProperties bootstrapProps) { - + public Hedera(@NonNull final ConstructableRegistry constructableRegistry) { this.constructableRegistry = requireNonNull(constructableRegistry); - // Load properties, configuration, and other things that can be done before a state is created. - this.bootstrapProps = requireNonNull(bootstrapProps); + // Print welcome message + logger.info("Welcome to Hedera! Developed with love by the Open Source Community. " + + "https://github.com/hashgraph/hedera-services"); + + // Let the user know which mode they are starting in (DEV vs. TEST vs. 
PROD) + final var bootstrapConfig = new ConfigProviderImpl(false).getConfiguration(); + final var hederaConfig = bootstrapConfig.getConfigData(HederaConfig.class); + final var activeProfile = Profile.valueOf(hederaConfig.activeProfile()); + logger.info("Starting in {} mode", activeProfile); // Read the software version - version = SEMANTIC_VERSIONS.deployedSoftwareVersion(); - logger.info("Creating Hedera Consensus Node v{} with HAPI v{}", version.getServices(), version.getProto()); + logger.debug("Loading Software Version"); + final var versionConfig = bootstrapConfig.getConfigData(VersionConfig.class); + version = new HederaSoftwareVersion(versionConfig.hapiVersion(), versionConfig.servicesVersion()); + logger.info( + "Creating Hedera Consensus Node {} with HAPI {}", + () -> HapiUtils.toString(version.getHapiVersion()), + () -> HapiUtils.toString(version.getServicesVersion())); // Create all the service implementations logger.info("Registering schemas for services"); @@ -215,10 +208,15 @@ private record ServiceRegistration( } } + /** Gets the port the gRPC server is listening on, or {@code -1} if there is no server listening. */ + public int getGrpcPort() { + return daggerApp.grpcServerManager().port(); + } + /** * {@inheritDoc} - *
<p>
- * Called immediately after the constructor to get the version of this software. In an upgrade scenario, this + * + *
<p>
Called immediately after the constructor to get the version of this software. In an upgrade scenario, this * version will be greater than the one in the saved state. * * @return The software version. @@ -265,48 +263,74 @@ private void onStateInitialized( @NonNull final Platform platform, @NonNull final SwirldDualState dualState, @NonNull final InitTrigger trigger, - @NonNull final SoftwareVersion previousVersion) { + @Nullable final SoftwareVersion previousVersion) { + + // We do nothing for EVENT_STREAM_RECOVERY. This is a special case that is handled by the platform. + if (trigger == EVENT_STREAM_RECOVERY) { + logger.debug("Skipping state initialization for trigger {}", trigger); + return; + } //noinspection ConstantValue assert dualState != null : "Platform should never pass a null dual state"; - logger.info("Initializing Hedera state with trigger {} and previous version {}", trigger, previousVersion); + logger.info( + "Initializing Hedera state with trigger {} and previous version {}", + () -> trigger, + () -> previousVersion == null ? "" : previousVersion); // We do not support downgrading from one version to an older version. - final var deserializedVersion = (SerializableSemVers) previousVersion; + final var deserializedVersion = (HederaSoftwareVersion) previousVersion; if (isDowngrade(version, deserializedVersion)) { - logger.error( - "Fatal error, state source version {} is after node software version {}", + logger.fatal( + "Fatal error, state source version {} is higher than node software version {}", deserializedVersion, version); - System.exit(1); + System.exit(STATE_VERSION_NEWER_THAN_SOFTWARE_VERSION_EXIT_CODE); } // This is the *FIRST* time in the initialization sequence that we have access to the platform. Grab it! + // This instance should never change on us, once it has been set + assert this.platform == null || this.platform == platform : "Platform should never change once set"; this.platform = platform; - // Different paths for different triggers. + // Different paths for different triggers. Every trigger should be handled here. If a new trigger is added, + // since there is no 'default' case, it will cause a compile error, so you will know you have to deal with it + // here. This is intentional so as to avoid forgetting to handle a new trigger. switch (trigger) { case GENESIS -> genesis(state, dualState); case RESTART -> restart(state, dualState, deserializedVersion); case RECONNECT -> reconnect(); - case EVENT_STREAM_RECOVERY -> eventStreamRecovery(); + // We exited from this method early if we were recovering from an event stream. + case EVENT_STREAM_RECOVERY -> throw new RuntimeException("Should never be reached"); } - // Since we now have an "app" instance, we can update the dual state accessor. - // This is *ONLY* used by to produce a log summary after a freeze. We should refactor - // to not have a global reference to this. + // This field has to be set by the time we get here. It will be set by both the genesis and restart code + // branches. One of those two is called before a "reconnect" trigger, so we should be fully guaranteed that this + // assertion will hold true. + assert configProvider != null : "Config Provider *must* have been set by now!"; + + // Since we now have an "app" instance, we can update the dual state accessor. This is *ONLY* used by the app to + // produce a log summary after a freeze. We should refactor to not have a global reference to this. 
updateDualState(dualState); + + logger.info("Validating ledger state..."); + validateLedgerState(state); + logger.info("Ledger state ok"); } /** - * Called by this class when we detect it is time to do migration. + * Called by this class when we detect it is time to do migration. This is only used as part of genesis or restart, + * not as a result of reconnect. */ private void onMigrate( - @NonNull final MerkleHederaState state, @Nullable final SerializableSemVers deserializedVersion) { - final var previousVersion = - deserializedVersion == null ? null : PbjConverter.toPbj(deserializedVersion.getServices()); - final var currentVersion = PbjConverter.toPbj(version.getServices()); - logger.info("Migrating from version {} to {}", previousVersion, currentVersion); + @NonNull final MerkleHederaState state, @Nullable final HederaSoftwareVersion deserializedVersion) { + final var currentVersion = version.getServicesVersion(); + final var previousVersion = deserializedVersion == null ? null : deserializedVersion.getServicesVersion(); + logger.info( + "Migrating from version {} to {}", + () -> previousVersion == null ? "" : HapiUtils.toString(previousVersion), + () -> HapiUtils.toString(currentVersion)); + for (final var service : servicesRegistry.services()) { // FUTURE We should have metrics here to keep track of how long it takes to migrate each service final var serviceName = service.getServiceName(); @@ -315,6 +339,7 @@ private void onMigrate( registry.migrate(state, previousVersion, currentVersion, configProvider.getConfiguration()); logger.info("Migrated Service {}", serviceName); } + logger.info("Migration complete"); } /*================================================================================================================== @@ -335,12 +360,8 @@ public void init(@NonNull final Platform platform, @NonNull final NodeId nodeId) assert this.platform == platform : "Platform must be the same instance"; logger.info("Initializing Hedera app with HederaNode#{}", nodeId); - // Ensure the prefetch queue is created and thread pool is active instead of waiting - // for lazy-initialization to take place - daggerApp.prefetchProcessor(); - - // Check that UTF-8 is in use. Otherwise, the node will be subject to subtle bugs - // in string handling that will lead to ISS. + // Check that UTF-8 is in use. Otherwise, the node will be subject to subtle bugs in string handling that will + // lead to ISS. final var defaultCharset = daggerApp.nativeCharset().get(); if (!isUTF8(defaultCharset)) { logger.error( @@ -364,17 +385,68 @@ public void init(@NonNull final Platform platform, @NonNull final NodeId nodeId) Locale.setDefault(Locale.US); logger.info("Locale to set to US en"); - validateLedgerState(); - logger.info("Ledger state ok"); - - configurePlatform(); - logger.info("Platform is configured w/ callbacks and stats registered"); - - exportAccountsIfDesired(); - logger.info("Accounts exported (if requested)"); + // The Hashgraph platform has a "platform state", and a notification service to indicate when those + // states change. We will use these state changes for various purposes, such as turning off the gRPC + // server when we fall behind or ISS. 
+ final var notifications = platform.getNotificationEngine(); + notifications.register(PlatformStatusChangeListener.class, notification -> { + switch (notification.getNewStatus()) { + case ACTIVE -> { + run(); + logger.info("Hederanode#{} is ACTIVE", nodeId); + } + case BEHIND -> { + logger.info("Hederanode#{} is BEHIND", nodeId); + shutdownGrpcServer(); + } + case DISCONNECTED -> { + logger.info("Hederanode#{} is DISCONNECTED", nodeId); + shutdownGrpcServer(); + } + case FREEZE_COMPLETE -> { + logger.info("Hederanode#{} is in FREEZE_COMPLETE", nodeId); + shutdownGrpcServer(); + } + case REPLAYING_EVENTS -> logger.info("Hederanode#{} is REPLAYING_EVENTS", nodeId); + case STARTING_UP -> logger.info("Hederanode#{} is STARTING_UP", nodeId); + case CATASTROPHIC_FAILURE -> { + logger.info("Hederanode#{} is in CATASTROPHIC_FAILURE", nodeId); + shutdownGrpcServer(); + } + case CHECKING -> logger.info("Hederanode#{} is CHECKING", nodeId); + case OBSERVING -> logger.info("Hederanode#{} is OBSERVING", nodeId); + case FREEZING -> logger.info("Hederanode#{} is FREEZING", nodeId); + case RECONNECT_COMPLETE -> logger.info("Hederanode#{} is RECONNECT_COMPLETE", nodeId); + } + }); + + // TBD: notifications.register(ReconnectCompleteListener.class, daggerApp.reconnectListener()); + // The main job of the reconnect listener (com.hedera.node.app.service.mono.state.logic.ReconnectListener) + // is to log some output (including hashes from the tree for the main state per service) and then to + // "catchUpOnMissedSideEffects". This last part worries me, because it looks like it invades into the space + // filled by the freeze service. How should we coordinate lifecycle like reconnect with the services? I am + // tempted to say that each service has lifecycle methods we can invoke (optional methods on the Service + // interface), but I worry about the order of invocation on different services. Which service gets called + // before which other service? Does it matter? + + // TBD: notifications.register(StateWriteToDiskCompleteListener.class, + // It looks like this notification is handled by + // com.hedera.node.app.service.mono.state.logic.StateWriteToDiskListener + // which looks like it is related to freeze / upgrade. + // daggerApp.stateWriteToDiskListener()); + + // TBD: notifications.register(NewSignedStateListener.class, daggerApp.newSignedStateListener()); + // com.hedera.node.app.service.mono.state.exports.NewSignedStateListener + // Has some relationship to freeze/upgrade, but also with balance exports. This was the trigger that + // caused us to export balance files on a certain schedule. + + // TBD: notifications.register(IssListener.class, daggerApp.issListener()); + // com.hedera.node.app.service.mono.state.forensics.ServicesIssListener + // This is something that MUST be implemented by the Hedera app module. We use this to respond to detected + // ISS events, logging, restarting, etc. } catch (final Exception e) { logger.error("Fatal precondition violation in HederaNode#{}", daggerApp.nodeId(), e); - daggerApp.systemExits().fail(1); + daggerApp.systemExits().fail(1); // TBD: Better exit code? 
} } @@ -398,23 +470,24 @@ private boolean sha384DigestIsAvailable(@NonNull final NamedDigestFactory digest } } - private void exportAccountsIfDesired() { - daggerApp.accountsExporter().toFile(daggerApp.workingState().accounts()); - } - - private void configurePlatform() { - daggerApp.statsManager().initializeFor(platform); - } + /** Verifies some aspects of the ledger state */ + private void validateLedgerState(@NonNull final HederaState state) { + // For a non-zero stake node, validates presence of a self-account in the address book. + final var selfNodeInfo = daggerApp.networkInfo().selfNodeInfo(); + if (!selfNodeInfo.zeroStake() && selfNodeInfo.accountId() == null) { + logger.fatal("Node is not zero-stake, but has no known account"); + daggerApp.systemExits().fail(1); // TBD What code to use? + } - private void validateLedgerState() { - daggerApp.ledgerValidator().validate(daggerApp.workingState().accounts()); - daggerApp.nodeInfo().validateSelfAccountIfNonZeroStake(); - final var notifications = daggerApp.notificationEngine().get(); - notifications.register(PlatformStatusChangeListener.class, daggerApp.statusChangeListener()); - notifications.register(ReconnectCompleteListener.class, daggerApp.reconnectListener()); - notifications.register(StateWriteToDiskCompleteListener.class, daggerApp.stateWriteToDiskListener()); - notifications.register(NewSignedStateListener.class, daggerApp.newSignedStateListener()); - notifications.register(IssListener.class, daggerApp.issListener()); + // Verify the ledger state. At the moment, this is a sanity check that we still have all HBARs present and + // accounted for. We may do more checks in the future. Every check we add slows down restart, especially when + // we start loading massive amounts of state from disk. + try { + daggerApp.ledgerValidator().validate(state); + } catch (Throwable th) { + logger.fatal("Ledger validation failed", th); + daggerApp.systemExits().fail(1); // TBD What code to use? + } } /*================================================================================================================== @@ -439,9 +512,19 @@ public void run() { * Invoked by the platform to handle pre-consensus events. This only happens after {@link #run()} has been called. */ private void onPreHandle(@NonNull final Event event, @NonNull final HederaState state) { - // For now, we will delegate pre-handle to the mono-service. But this needs to be moved to - // use the Pre-Handle workflow instead. - daggerApp.adaptedMonoEventExpansion().expand(event, state, daggerApp.nodeInfo()); + final var readableStoreFactory = new ReadableStoreFactory(state); + final var creator = + daggerApp.networkInfo().nodeInfo(event.getCreatorId().id()); + if (creator == null) { + // We were given an event for a node that *does not exist in the address book*. This will be logged as + // a warning, as this should never happen, and we will skip the event, which may well result in an ISS. 
+ logger.warn("Received event from node {} which is not in the address book", event.getCreatorId()); + return; + } + + final var transactions = new ArrayList(1000); + event.forEachTransaction(transactions::add); + daggerApp.preHandleWorkflow().preHandle(readableStoreFactory, creator.accountId(), transactions.stream()); } /** @@ -450,11 +533,9 @@ private void onPreHandle(@NonNull final Event event, @NonNull final HederaState */ private void onHandleConsensusRound( @NonNull final Round round, @NonNull final SwirldDualState dualState, @NonNull final HederaState state) { - // TBD: The handle workflow should be created by dagger and just be something we can delegate to here. - daggerApp.mutableState().set(state); - daggerApp.dualStateAccessor().setDualState(dualState); daggerApp.workingStateAccessor().setHederaState(state); - daggerApp.logic().incorporateConsensus(round); + // TBD: Add in dual state when needed :: daggerApp.dualStateAccessor().setDualState(dualState); + daggerApp.handleWorkflow().handleRound(state, round); } /*================================================================================================================== @@ -487,91 +568,194 @@ private void genesis(@NonNull final MerkleHederaState state, @NonNull final Swir logger.info("Initializing Configuration"); this.configProvider = new ConfigProviderImpl(true); + logConfiguration(); // Create all the nodes in the merkle tree for all the services onMigrate(state, null); - // Initialize the tree with all the pre-built state we need for a basic system, - // such as the accounts for initial users. Some services might populate their data - // in their Schema migration handlers. - final var seqStart = bootstrapProps.getLongProperty(HEDERA_FIRST_USER_ENTITY); - logger.debug("Creating genesis children at seqStart = {}", seqStart); - createSpecialGenesisChildren(state, dualState, platform.getAddressBook(), seqStart); - // Now that we have the state created, we are ready to create the dependency graph with Dagger - initializeDagger(state, InitTrigger.GENESIS); + initializeDagger(state, GENESIS); - // Store the version in state (ideally this would move to be something that is done when the - // network service runs its schema migration) + // Store the version in state + // TODO Who is responsible for saving this in the tree? I assumed it went into dual state... not sensible! logger.debug("Saving version information in state"); - final var networkCtx = stateChildren.networkCtx(); - networkCtx.setStateVersion(StateVersions.CURRENT_VERSION); - - // TODO Not sure - daggerApp.initializationFlow().runWith(stateChildren, bootstrapProps); - daggerApp - .sysAccountsCreator() - .ensureSystemAccounts( - daggerApp.backingAccounts(), daggerApp.workingState().addressBook()); - daggerApp.sysFilesManager().createManagedFilesIfMissing(); - daggerApp.stakeStartupHelper().doGenesisHousekeeping(stateChildren.addressBook()); + // final var networkCtx = stateChildren.networkCtx(); + // networkCtx.setStateVersion(StateVersions.CURRENT_VERSION); // For now, we have to update the stake details manually. When we have dynamic address book, // then we'll move this to be shared with all state initialization flows and not just genesis // and restart. 
logger.debug("Initializing stake details"); - daggerApp.sysFilesManager().updateStakeDetails(); + // daggerApp.sysFilesManager().updateStakeDetails(); // TODO Not sure - networkCtx.markPostUpgradeScanStatus(); + // networkCtx.markPostUpgradeScanStatus(); } - /** - * Create the special children of the root node that are needed for genesis. - * - *

It would be good to see if we can break this logic up and have it be part of the individual - * modules. For example, it would be good if the first Schema version in NetworkServices would put this genesis - * state into place. However, for to that work, we need to make some information available at migration, such as the - * initial sequence number and the address book. We either have to make that information globally available to - * services, or we need to have Dagger injection for schemas to provide ad-hoc dependencies. - */ - private void createSpecialGenesisChildren( - @NonNull final MerkleHederaState state, - @NonNull final DualState dualState, - @NonNull final AddressBook addressBook, - final long seqStart) { + // TODO SHOULD BE USED FOR ALL START/RESTART/GENESIS SCENARIOS + private void stateInitializationFlow() { + /* + final var lastThrottleExempt = bootstrapProperties.getLongProperty(ACCOUNTS_LAST_THROTTLE_EXEMPT); + // The last throttle-exempt account is configurable to make it easy to start dev networks + // without throttling + numberConfigurer.configureNumbers(hederaNums, lastThrottleExempt); - // Prepopulate the FreezeService state with default values for genesis - final var adminStates = state.createWritableStates(FreezeService.NAME); - // specialFiles will move to FileService - adminStates.getSingleton(FreezeServiceImpl.UPGRADE_FILES_KEY).put(new MerkleSpecialFiles()); - ((MerkleWritableStates) adminStates).commit(); + workingState.updateFrom(activeState); + log.info("Context updated with working state"); - adminStates.getSingleton(FreezeServiceImpl.DUAL_STATE_KEY).put(new WritableFreezeStore(dualState)); + final var activeHash = activeState.runningHashLeaf().getRunningHash().getHash(); + recordStreamManager.setInitialHash(activeHash); + log.info("Record running hash initialized"); - // Prepopulate the NetworkServices state with default values for genesis - // Can these be moved to Schema version 1 of NetworkServicesImpl? 
- final var networkStates = state.createWritableStates(NetworkService.NAME); - networkStates.getSingleton(NetworkServiceImpl.CONTEXT_KEY).put(genesisNetworkCtxWith(seqStart)); - networkStates.getSingleton(NetworkServiceImpl.RUNNING_HASHES_KEY).put(genesisRunningHashLeaf()); - buildStakingInfoMap(addressBook, bootstrapProps, networkStates.get(NetworkServiceImpl.STAKING_KEY)); - ((MerkleWritableStates) networkStates).commit(); + if (hfs.numRegisteredInterceptors() == 0) { + fileUpdateInterceptors.forEach(hfs::register); + log.info("Registered {} file update interceptors", fileUpdateInterceptors.size()); + } + */ + } - // Prepopulate the ScheduleServices state with default values for genesis - final var scheduledStates = state.createWritableStates(ScheduleService.NAME); - final var neverScheduledState = new MerkleScheduledTransactionsState(); - scheduledStates.getSingleton(ScheduleServiceImpl.SCHEDULING_STATE_KEY).put(neverScheduledState); - ((MerkleWritableStates) scheduledStates).commit(); + // TODO SHOULD BE USED FOR ALL START/RESTART/GENESIS SCENARIOS + private void storeFlow() { + /* + backingTokenRels.rebuildFromSources(); + backingAccounts.rebuildFromSources(); + backingTokens.rebuildFromSources(); + backingNfts.rebuildFromSources(); + log.info("Backing stores rebuilt"); + + usageLimits.resetNumContracts(); + aliasManager.rebuildAliasesMap(workingState.accounts(), (num, account) -> { + if (account.isSmartContract()) { + usageLimits.recordContracts(1); + } + }); + log.info("Account aliases map rebuilt"); + */ } - private RecordsRunningHashLeaf genesisRunningHashLeaf() { - final var genesisRunningHash = new RunningHash(); - genesisRunningHash.setHash(EMPTY_HASH); - return new RecordsRunningHashLeaf(genesisRunningHash); + // TODO SHOULD BE USED FOR ALL START/RESTART/GENESIS SCENARIOS + private void entitiesFlow() { + /* + expiries.reviewExistingPayerRecords(); + log.info("Payer records reviewed"); + // Use any entities stored in state to rebuild queue of expired entities. + log.info("Short-lived entities reviewed"); + + sigImpactHistorian.invalidateCurrentWindow(); + log.info("Signature impact history invalidated"); + + // Re-initialize the "observable" system files; that is, the files which have + // associated callbacks managed by the SysFilesCallback object. We explicitly + // re-mark the files are not loaded here, in case this is a reconnect. 
+ networkCtxManager.setObservableFilesNotLoaded(); + networkCtxManager.loadObservableSysFilesIfNeeded(); + */ } - private MerkleNetworkContext genesisNetworkCtxWith(final long seqStart) { - return new MerkleNetworkContext(null, new SequenceNumber(seqStart), seqStart - 1, new ExchangeRates()); + private void ensureSystemAccounts() { + /* + final long systemAccounts = properties.getIntProperty(LEDGER_NUM_SYSTEM_ACCOUNTS); + final long expiry = properties.getLongProperty(BOOTSTRAP_SYSTEM_ENTITY_EXPIRY); + final long tinyBarFloat = properties.getLongProperty(LEDGER_TOTAL_TINY_BAR_FLOAT); + + for (long num = 1; num <= systemAccounts; num++) { + final var id = STATIC_PROPERTIES.scopedAccountWith(num); + if (accounts.contains(id)) { + continue; + } + final HederaAccount account; + if (num == accountNums.treasury()) { + account = accountWith(tinyBarFloat, expiry); + } else { + account = accountWith(ZERO_BALANCE, expiry); + } + accounts.put(id, account); + systemAccountsCreated.add(account); + } + + final var stakingRewardAccountNum = accountNums.stakingRewardAccount(); + final var stakingRewardAccountId = STATIC_PROPERTIES.scopedAccountWith(stakingRewardAccountNum); + final var nodeRewardAccountNum = accountNums.nodeRewardAccount(); + final var nodeRewardAccountId = STATIC_PROPERTIES.scopedAccountWith(nodeRewardAccountNum); + final var stakingFundAccounts = List.of(stakingRewardAccountId, nodeRewardAccountId); + for (final var id : stakingFundAccounts) { + if (!accounts.contains(id)) { + final var stakingFundAccount = accountSupplier.get(); + customizeAsStakingFund(stakingFundAccount); + accounts.put(id, stakingFundAccount); + } + } + for (long num = 900; num <= 1000; num++) { + final var id = STATIC_PROPERTIES.scopedAccountWith(num); + if (!accounts.contains(id)) { + final var account = accountWith(ZERO_BALANCE, expiry); + accounts.put(id, account); + systemAccountsCreated.add(account); + } + } + + treasuryCloner.ensureTreasuryClonesExist(); + + var ledgerFloat = 0L; + final var allIds = accounts.idSet(); + for (final var id : allIds) { + ledgerFloat += accounts.getImmutableRef(id).getBalance(); + } + log.info("Ledger float is {} tinyBars in {} accounts.", ledgerFloat, allIds.size()); + */ + } + + // Only called during genesis + private void createAddressBookIfMissing() { + // Get the address book from the platform and create a NodeAddressBook, and write the protobuf bytes of + // this into state. (This should be done by the File service schema. Or somebody who owns it.) To do that, + // we need to make the address book available in the SPI so the file service can get it. Or, is it owned + // by the network admin service, and the current storage is in the file service, but doesn't actually belong + // there. I tend to think that is the case. But we use the file service today and a special file and that is + // actually depended on by the mirror node. So to change that would require a HIP. + /* + writeFromBookIfMissing(fileNumbers.addressBook(), this::platformAddressBookToGrpc); + */ + } + + // Only called during genesis + private void createNodeDetailsIfMissing() { + // Crazy! Same contents as the address book, but this one is "node details" file. Two files with the same + // contents? Why? 
+ /* + writeFromBookIfMissing(fileNumbers.nodeDetails(), this::platformAddressBookToGrpc); + */ + } + + // Only called during genesis + private void createUpdateFilesIfMissing() { + /* + final var firstUpdateNum = fileNumbers.firstSoftwareUpdateFile(); + final var lastUpdateNum = fileNumbers.lastSoftwareUpdateFile(); + final var specialFiles = hfs.specialFiles(); + for (var updateNum = firstUpdateNum; updateNum <= lastUpdateNum; updateNum++) { + final var disFid = fileNumbers.toFid(updateNum); + if (!hfs.exists(disFid)) { + materialize(disFid, systemFileInfo(), new byte[0]); + } else if (!specialFiles.contains(disFid)) { + // This can be the case for file 0.0.150, whose metadata had + // been created for the legacy MerkleDiskFs. But whatever its + // contents were doesn't matter now. Just make sure it exists + // in the MerkleSpecialFiles! + specialFiles.update(disFid, new byte[0]); + } + } + */ + } + + private void doGenesisHousekeeping() { + /* + // List the node ids in the address book at genesis + final List genesisNodeIds = idsFromAddressBook(addressBook); + + // Prepare the stake info manager for managing the new node ids + stakeInfoManager.prepForManaging(genesisNodeIds); + */ } private void buildStakingInfoMap( @@ -600,9 +784,15 @@ private void buildStakingInfoMap( private void restart( @NonNull final MerkleHederaState state, @NonNull final SwirldDualState dualState, - @NonNull final SerializableSemVers deserializedVersion) { + @Nullable final HederaSoftwareVersion deserializedVersion) { logger.debug("Restart Initialization"); + // The deserialized version can ONLY be null if we are in genesis, otherwise something is wrong with the state + if (deserializedVersion == null) { + logger.fatal("Fatal error, previous software version not found in saved state!"); + System.exit(VERSION_NOT_IN_SAVED_STATE_EXIT_CODE); + } + // This configuration is based on what is in state *RIGHT NOW*, before any possible upgrade. This is the config // that must be passed to the migration methods. // TODO: Actually, we should reinitialize the config on each step along the migration path, so we should pass @@ -617,30 +807,36 @@ private void restart( onMigrate(state, deserializedVersion); } + // TODO Update the configuration with whatever is the new latest version in state. In reality, we shouldn't + // be messing with configuration during migration, but it could happen (by the file service), so we should + // be defensive about it so the software is always correct, even in that very unlikely scenario. + // this.configProvider.update(null); + // Now that we have the state created, we are ready to create all the dagger dependencies - initializeDagger(state, InitTrigger.RESTART); + initializeDagger(state, RESTART); // We may still want to change the address book without an upgrade. But note // that without a dynamic address book, this MUST be a no-op during reconnect.
- final var stakingInfo = stateChildren.stakingInfo(); - final var networkCtx = stateChildren.networkCtx(); - daggerApp.stakeStartupHelper().doRestartHousekeeping(stateChildren.addressBook(), stakingInfo); - if (upgrade) { - dualState.setFreezeTime(null); - networkCtx.discardPreparedUpgradeMeta(); - if (version.hasMigrationRecordsFrom(deserializedVersion)) { - networkCtx.markMigrationRecordsNotYetStreamed(); - } - } + // final var stakingInfo = stateChildren.stakingInfo(); + // final var networkCtx = stateChildren.networkCtx(); + // daggerApp.stakeStartupHelper().doRestartHousekeeping(stateChildren.addressBook(), stakingInfo); + // if (upgrade) { + // dualState.setFreezeTime(null); + // networkCtx.discardPreparedUpgradeMeta(); + //// if (version.hasMigrationRecordsFrom(deserializedVersion)) { + //// networkCtx.markMigrationRecordsNotYetStreamed(); + //// } + // } // This updates the working state accessor with our children - daggerApp.initializationFlow().runWith(stateChildren, bootstrapProps); - if (upgrade) { - daggerApp.stakeStartupHelper().doUpgradeHousekeeping(networkCtx, stateChildren.accounts(), stakingInfo); - } + // daggerApp.initializationFlow().runWith(stateChildren, configProvider); + // if (upgrade) { + // daggerApp.stakeStartupHelper().doUpgradeHousekeeping(networkCtx, stateChildren.accounts(), + // stakingInfo); + // } // Once we have a dynamic address book, this will run unconditionally - daggerApp.sysFilesManager().updateStakeDetails(); + // daggerApp.sysFilesManager().updateStakeDetails(); } /*================================================================================================================== @@ -653,16 +849,6 @@ private void reconnect() { // No-op } - /*================================================================================================================== - * - * Event Stream Recovery Initialization - * - =================================================================================================================*/ - - private void eventStreamRecovery() { - // No-op - } - /*================================================================================================================== * * Random private helper methods @@ -673,54 +859,55 @@ private void initializeDagger(@NonNull final MerkleHederaState state, @NonNull f logger.debug("Initializing dagger"); final var selfId = platform.getSelfId(); if (daggerApp == null) { - stateChildren = state.getStateChildrenProvider(platform); - final var nodeAddress = stateChildren.addressBook().getAddress(selfId); + final var nodeAddress = platform.getAddressBook().getAddress(selfId); final var nodeSelfAccount = parseAccount(nodeAddress.getMemo()); - final var initialHash = - stateChildren.runningHashLeaf().getRunningHash().getHash(); + final var runningHashStore = new ReadableStoreFactory(state).getStore(ReadableRunningHashLeafStore.class); + final var initialHash = runningHashStore.getRunningHash(); // Fully qualified so as to not confuse javadoc daggerApp = com.hedera.node.app.DaggerHederaInjectionComponent.builder() .initTrigger(trigger) + .configuration(configProvider) .staticAccountMemo(nodeAddress.getMemo()) - .bootstrapProps(bootstrapProps) .initialHash(initialHash) .platform(platform) - .consoleCreator(SwirldsGui::createConsole) .maxSignedTxnSize(MAX_SIGNED_TXN_SIZE) .crypto(CryptographyHolder.get()) + .currentPlatformStatus(new CurrentPlatformStatusImpl(platform)) .selfId(nodeSelfAccount) - .genesisUsage(trigger == InitTrigger.GENESIS) .servicesRegistry(servicesRegistry) + .bootstrapProps(new 
BootstrapProperties(false)) // TBD REMOVE + .instantSource(InstantSource.system()) .build(); + + daggerApp.workingStateAccessor().setHederaState(state); } } private void updateDualState(final SwirldDualState dualState) { - daggerApp.dualStateAccessor().setDualState(dualState); + // daggerApp.dualStateAccessor().setDualState(dualState); logger.info( "Dual state includes freeze time={} and last frozen={}", dualState.getFreezeTime(), dualState.getLastFrozenTime()); } - private boolean isUpgrade(final SerializableSemVers deployedVersion, final SoftwareVersion deserializedVersion) { + private boolean isUpgrade(final HederaSoftwareVersion deployedVersion, final SoftwareVersion deserializedVersion) { return deployedVersion.isAfter(deserializedVersion); } - private boolean isDowngrade(final SerializableSemVers deployedVersion, final SoftwareVersion deserializedVersion) { + private boolean isDowngrade( + final HederaSoftwareVersion deployedVersion, final SoftwareVersion deserializedVersion) { return deployedVersion.isBefore(deserializedVersion); } - private AccountID parseAccount(@NonNull final String string) { - try { - final var parts = string.split("\\."); - return AccountID.newBuilder() - .shardNum(Long.parseLong(parts[0])) - .realmNum(Long.parseLong(parts[1])) - .accountNum(Long.parseLong(parts[2])) - .build(); - } catch (final NumberFormatException | ArrayIndexOutOfBoundsException e) { - throw new IllegalArgumentException(String.format("'%s' is not a dot-separated triplet", string)); + private void logConfiguration() { + // TODO Need reflection to print out all of the configuration values. + if (logger.isInfoEnabled()) { + final var config = configProvider.getConfiguration(); + final var lines = new ArrayList(); + lines.add("Active Configuration:"); + config.getPropertyNames().forEach(name -> lines.add(name + " = " + config.getValue(name))); + logger.info(String.join("\n", lines)); } } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java index af542939dc08..6f65fc06bb4d 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java @@ -22,38 +22,42 @@ import com.hedera.node.app.authorization.AuthorizerInjectionModule; import com.hedera.node.app.components.IngestInjectionComponent; import com.hedera.node.app.components.QueryInjectionComponent; -import com.hedera.node.app.config.ConfigModule; -import com.hedera.node.app.config.GenesisUsage; import com.hedera.node.app.fees.FeesInjectionModule; import com.hedera.node.app.grpc.GrpcInjectionModule; import com.hedera.node.app.grpc.GrpcServerManager; +import com.hedera.node.app.info.CurrentPlatformStatus; import com.hedera.node.app.info.InfoInjectionModule; import com.hedera.node.app.metrics.MetricsInjectionModule; import com.hedera.node.app.platform.PlatformModule; import com.hedera.node.app.service.mono.LegacyMonoInjectionModule; -import com.hedera.node.app.service.mono.ServicesApp; import com.hedera.node.app.service.mono.context.annotations.BootstrapProps; import com.hedera.node.app.service.mono.context.annotations.StaticAccountMemo; import com.hedera.node.app.service.mono.context.properties.PropertySource; -import com.hedera.node.app.service.mono.state.StateModule; -import com.hedera.node.app.service.mono.utils.NonAtomicReference; +import 
com.hedera.node.app.service.mono.utils.NamedDigestFactory; +import com.hedera.node.app.service.mono.utils.SystemExits; import com.hedera.node.app.services.ServicesInjectionModule; import com.hedera.node.app.services.ServicesRegistry; import com.hedera.node.app.solvency.SolvencyInjectionModule; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.records.RecordCache; -import com.hedera.node.app.state.HederaState; import com.hedera.node.app.state.HederaStateInjectionModule; +import com.hedera.node.app.state.LedgerValidator; import com.hedera.node.app.state.WorkingStateAccessor; import com.hedera.node.app.throttle.ThrottleInjectionModule; import com.hedera.node.app.workflows.WorkflowsInjectionModule; -import com.hedera.node.app.workflows.prehandle.AdaptedMonoEventExpansion; +import com.hedera.node.app.workflows.handle.HandleWorkflow; +import com.hedera.node.app.workflows.prehandle.PreHandleWorkflow; +import com.hedera.node.config.ConfigProvider; import com.swirlds.common.crypto.Cryptography; import com.swirlds.common.crypto.Hash; import com.swirlds.common.system.InitTrigger; +import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Platform; import dagger.BindsInstance; import dagger.Component; -import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.charset.Charset; +import java.time.InstantSource; +import java.util.function.Supplier; import javax.inject.Provider; import javax.inject.Singleton; @@ -76,10 +80,9 @@ InfoInjectionModule.class, ThrottleInjectionModule.class, SolvencyInjectionModule.class, - ConfigModule.class, PlatformModule.class }) -public interface HederaInjectionComponent extends ServicesApp { +public interface HederaInjectionComponent { /* Needed by ServicesState */ Provider queryComponentFactory(); @@ -87,14 +90,26 @@ public interface HederaInjectionComponent extends ServicesApp { WorkingStateAccessor workingStateAccessor(); - AdaptedMonoEventExpansion adaptedMonoEventExpansion(); - - NonAtomicReference mutableState(); - RecordCache recordCache(); GrpcServerManager grpcServerManager(); + NodeId nodeId(); + + Supplier nativeCharset(); + + SystemExits systemExits(); + + NamedDigestFactory digestFactory(); + + NetworkInfo networkInfo(); + + LedgerValidator ledgerValidator(); + + PreHandleWorkflow preHandleWorkflow(); + + HandleWorkflow handleWorkflow(); + @Component.Builder interface Builder { @BindsInstance @@ -110,10 +125,7 @@ interface Builder { Builder initialHash(Hash initialHash); @BindsInstance - Builder platform(@NonNull Platform platform); - - @BindsInstance - Builder consoleCreator(StateModule.ConsoleCreator consoleCreator); + Builder platform(Platform platform); @BindsInstance Builder selfId(@NodeSelfId final AccountID selfId); @@ -124,15 +136,17 @@ interface Builder { @BindsInstance Builder bootstrapProps(@BootstrapProps PropertySource bootstrapProps); + @BindsInstance + Builder configuration(ConfigProvider configProvider); + @BindsInstance Builder maxSignedTxnSize(@MaxSignedTxnSize final int maxSignedTxnSize); - /** - * @deprecated we need to define the correct workflow to define that genesis is used - */ - @Deprecated @BindsInstance - Builder genesisUsage(@GenesisUsage final boolean genesisUsage); + Builder currentPlatformStatus(CurrentPlatformStatus currentPlatformStatus); + + @BindsInstance + Builder instantSource(InstantSource instantSource); HederaInjectionComponent build(); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/internal/SettingsCommon.java 
b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Profile.java similarity index 61% rename from platform-sdk/swirlds-common/src/main/java/com/swirlds/common/internal/SettingsCommon.java rename to hedera-node/hedera-app/src/main/java/com/hedera/node/app/Profile.java index 409c01d1070a..a4f217b25f7d 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/internal/SettingsCommon.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Profile.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * Copyright (C) 2023 Hedera Hashgraph, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,14 +14,13 @@ * limitations under the License. */ -package com.swirlds.common.internal; +package com.hedera.node.app; /** - * @deprecated this is not a good access pattern, don't add to this mess by increasing the places where its used + * Defines the mode in which the consensus node is executed. */ -@Deprecated -public class SettingsCommon { - // used by AbstractStatistics - public static boolean showInternalStats; - public static boolean verboseStatistics; +public enum Profile { + DEV, + TEST, + PROD } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java index 2f58525def65..4cde018ce3d5 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java @@ -16,9 +16,8 @@ package com.hedera.node.app; -import static com.hedera.node.app.service.mono.context.properties.PropertyNames.WORKFLOWS_ENABLED; - -import com.hedera.node.app.service.mono.context.properties.BootstrapProperties; +import com.hedera.node.app.config.ConfigProviderImpl; +import com.hedera.node.config.data.HederaConfig; import com.swirlds.common.constructable.ConstructableRegistry; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Platform; @@ -51,14 +50,14 @@ public class ServicesMain implements SwirldMain { /** Create a new instance */ public ServicesMain() { - final var bootstrapProps = new BootstrapProperties(false); - final var enabledWorkflows = bootstrapProps.getFunctionsProperty(WORKFLOWS_ENABLED); - if (enabledWorkflows.isEmpty()) { + final var configProvider = new ConfigProviderImpl(false); + final var hederaConfig = configProvider.getConfiguration().getConfigData(HederaConfig.class); + if (hederaConfig.workflowsEnabled().isEmpty()) { logger.info("No workflows enabled, using mono-service"); delegate = new MonoServicesMain(); } else { logger.info("One or more workflows enabled, using Hedera"); - delegate = new Hedera(ConstructableRegistry.getInstance(), bootstrapProps); + delegate = new Hedera(ConstructableRegistry.getInstance()); } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/Authorizer.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/Authorizer.java index 0ba824928689..21e5f9198918 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/Authorizer.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/Authorizer.java @@ -32,4 +32,13 @@ public interface Authorizer { * @return true if the account is authorized, otherwise false. 
*/ boolean isAuthorized(@NonNull AccountID id, @NonNull HederaFunctionality function); + + /** + * Checks whether the given account refers to a superuser. If the {@link AccountID} does not contain an account + * number (for example, because it uses an alias), then this method will return false. + * + * @param id The ID of the account to check + * @return Whether the ID definitively refers to a super-user + */ + boolean isSuperUser(@NonNull AccountID id); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerImpl.java index b0f1cb5036f3..c158ff5347e6 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerImpl.java @@ -16,27 +16,33 @@ package com.hedera.node.app.authorization; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.HederaFunctionality; -import com.hedera.node.app.service.mono.context.domain.security.HapiOpPermissions; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.AccountsConfig; +import com.hedera.node.config.data.ApiPermissionConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Objects; import javax.inject.Inject; import javax.inject.Singleton; /** - * An implementation of {@link Authorizer} based on the existing mono-service {@link HapiOpPermissions} facility. + * An implementation of {@link Authorizer}. 
*/ @Singleton public class AuthorizerImpl implements Authorizer { - private final HapiOpPermissions hapiOpPermissions; + private final ConfigProvider configProvider; + private final AccountsConfig accountsConfig; @Inject - public AuthorizerImpl(@NonNull final HapiOpPermissions hapiOpPermissions) { - this.hapiOpPermissions = requireNonNull(hapiOpPermissions); + public AuthorizerImpl(@NonNull final ConfigProvider configProvider) { + this.configProvider = requireNonNull(configProvider); + this.accountsConfig = configProvider.getConfiguration().getConfigData(AccountsConfig.class); } /** {@inheritDoc} */ @@ -44,7 +50,29 @@ public AuthorizerImpl(@NonNull final HapiOpPermissions hapiOpPermissions) { public boolean isAuthorized(@NonNull final AccountID id, @NonNull final HederaFunctionality function) { Objects.requireNonNull(id); Objects.requireNonNull(function); - final var permissionStatus = hapiOpPermissions.permissibilityOf2(function, id); - return permissionStatus == OK; + return permissibilityOf(id, function) == OK; + } + + @Override + public boolean isSuperUser(@NonNull final AccountID accountID) { + if (!accountID.hasAccountNum()) return false; + long num = accountID.accountNumOrThrow(); + return num == accountsConfig.treasury() || num == accountsConfig.systemAdmin(); + } + + private ResponseCodeEnum permissibilityOf( + @NonNull final AccountID givenPayer, @NonNull final HederaFunctionality function) { + if (isSuperUser(givenPayer)) { + return ResponseCodeEnum.OK; + } + + if (!givenPayer.hasAccountNum()) { + return ResponseCodeEnum.AUTHORIZATION_FAILED; + } + + final long num = givenPayer.accountNumOrThrow(); + final var permissionConfig = configProvider.getConfiguration().getConfigData(ApiPermissionConfig.class); + final var permission = permissionConfig.getPermission(function); + return permission != null && permission.contains(num) ? OK : NOT_SUPPORTED; } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerInjectionModule.java index eb463fa7d7e7..a7c5328d74d3 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/authorization/AuthorizerInjectionModule.java @@ -16,18 +16,15 @@ package com.hedera.node.app.authorization; -import com.hedera.node.app.service.mono.context.domain.security.HapiOpPermissions; +import dagger.Binds; import dagger.Module; -import dagger.Provides; import edu.umd.cs.findbugs.annotations.NonNull; import javax.inject.Singleton; /** A Dagger module for providing dependencies based on {@link Authorizer}. 
*/ @Module public interface AuthorizerInjectionModule { - @Provides + @Binds @Singleton - static Authorizer provideAuthorizer(@NonNull final HapiOpPermissions permissions) { - return new AuthorizerImpl(permissions); - } + Authorizer provideAuthorizer(@NonNull final AuthorizerImpl impl); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/config/ConfigProviderImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/config/ConfigProviderImpl.java index 81f178dc8c10..ba834be2b19f 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/config/ConfigProviderImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/config/ConfigProviderImpl.java @@ -33,7 +33,6 @@ import com.hedera.node.config.converter.LegacyContractIdActivationsConverter; import com.hedera.node.config.converter.MapAccessTypeConverter; import com.hedera.node.config.converter.PermissionedAccountsRangeConverter; -import com.hedera.node.config.converter.ProfileConverter; import com.hedera.node.config.converter.RecomputeTypeConverter; import com.hedera.node.config.converter.ScaleFactorConverter; import com.hedera.node.config.converter.SemanticVersionConverter; @@ -76,12 +75,13 @@ import com.hedera.pbj.runtime.io.buffer.Bytes; import com.swirlds.common.config.ConsensusConfig; import com.swirlds.common.config.sources.PropertyFileConfigSource; +import com.swirlds.common.config.sources.SystemEnvironmentConfigSource; +import com.swirlds.common.config.sources.SystemPropertiesConfigSource; import com.swirlds.common.threading.locks.AutoClosableLock; import com.swirlds.common.threading.locks.Locks; import com.swirlds.config.api.Configuration; import com.swirlds.config.api.ConfigurationBuilder; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.ByteArrayInputStream; import java.io.IOException; import java.nio.file.Path; @@ -89,7 +89,6 @@ import java.util.Properties; import java.util.concurrent.atomic.AtomicReference; import java.util.function.ObjIntConsumer; -import javax.inject.Inject; import javax.inject.Singleton; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -130,9 +129,7 @@ public class ConfigProviderImpl implements ConfigProvider { * Create a new instance. You must specify whether to use the genesis.properties file as a source for the * configuration. This should only be true if the node is starting from genesis. 
*/ - @Inject - public ConfigProviderImpl(@GenesisUsage @Nullable final Boolean useGenesisSource) { - requireNonNull(useGenesisSource); + public ConfigProviderImpl(final boolean useGenesisSource) { final var builder = createConfigurationBuilder(); addFileSources(builder, useGenesisSource); final Configuration config = builder.build(); @@ -163,6 +160,8 @@ public void update(@NonNull final Bytes propertyFileContent) { private ConfigurationBuilder createConfigurationBuilder() { return ConfigurationBuilder.create() + .withSource(SystemEnvironmentConfigSource.getInstance()) + .withSource(SystemPropertiesConfigSource.getInstance()) .withSource(new PropertyConfigSource(SEMANTIC_VERSION_PROPERTIES_DEFAULT_PATH, 500)) .withConfigDataType(AccountsConfig.class) .withConfigDataType(AutoCreationConfig.class) @@ -211,7 +210,6 @@ private ConfigurationBuilder createConfigurationBuilder() { .withConverter(new FileIDConverter()) .withConverter(new HederaFunctionalityConverter()) .withConverter(new PermissionedAccountsRangeConverter()) - .withConverter(new ProfileConverter()) .withConverter(new SidecarTypeConverter()) .withConverter(new SemanticVersionConverter()) .withConverter(new KeyValuePairConverter()) diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/AdaptedMonoFeeCalculator.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/AdaptedMonoFeeCalculator.java deleted file mode 100644 index f93334d9c3a0..000000000000 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/AdaptedMonoFeeCalculator.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.fees; - -import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; -import static com.hederahashgraph.api.proto.java.HederaFunctionality.ConsensusUpdateTopic; - -import com.hedera.node.app.hapi.utils.exception.InvalidTxBodyException; -import com.hedera.node.app.hapi.utils.fee.FeeObject; -import com.hedera.node.app.service.consensus.ReadableTopicStore; -import com.hedera.node.app.service.mono.context.primitives.StateView; -import com.hedera.node.app.service.mono.fees.FeeCalculator; -import com.hedera.node.app.service.mono.fees.HbarCentExchange; -import com.hedera.node.app.service.mono.fees.calculation.RenewAssessment; -import com.hedera.node.app.service.mono.fees.calculation.UsageBasedFeeCalculator; -import com.hedera.node.app.service.mono.fees.calculation.UsagePricesProvider; -import com.hedera.node.app.service.mono.fees.calculation.consensus.txns.UpdateTopicResourceUsage; -import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; -import com.hedera.node.app.service.mono.state.migration.HederaAccount; -import com.hedera.node.app.service.mono.utils.accessors.TxnAccessor; -import com.hedera.node.app.state.HederaState; -import com.hedera.node.app.state.WorkingStateAccessor; -import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; -import com.hederahashgraph.api.proto.java.ExchangeRate; -import com.hederahashgraph.api.proto.java.FeeData; -import com.hederahashgraph.api.proto.java.HederaFunctionality; -import com.hederahashgraph.api.proto.java.Query; -import com.hederahashgraph.api.proto.java.ResponseType; -import com.hederahashgraph.api.proto.java.SubType; -import com.hederahashgraph.api.proto.java.Timestamp; -import com.swirlds.common.utility.AutoCloseableWrapper; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.time.Instant; -import java.util.Map; -import java.util.function.Supplier; -import javax.inject.Inject; -import javax.inject.Singleton; - -@Singleton -public class AdaptedMonoFeeCalculator implements FeeCalculator { - private final HbarCentExchange exchange; - private final UsagePricesProvider usagePrices; - private final UsageBasedFeeCalculator monoFeeCalculator; - private final WorkingStateAccessor workingStateAccessor; - private final UpdateTopicResourceUsage monoUpdateTopicUsage; - private final Supplier> stateAccessor; - - @Inject - public AdaptedMonoFeeCalculator( - @NonNull final HbarCentExchange exchange, - @NonNull final UsagePricesProvider usagePrices, - @NonNull final UsageBasedFeeCalculator monoFeeCalculator, - @NonNull final WorkingStateAccessor workingStateAccessor, - @NonNull final UpdateTopicResourceUsage monoUpdateTopicUsage, - @NonNull final Supplier> stateAccessor) { - this.exchange = exchange; - this.usagePrices = usagePrices; - this.monoFeeCalculator = monoFeeCalculator; - this.workingStateAccessor = workingStateAccessor; - this.monoUpdateTopicUsage = monoUpdateTopicUsage; - this.stateAccessor = stateAccessor; - } - - @Override - public void init() { - monoFeeCalculator.init(); - } - - @Override - public long estimatedGasPriceInTinybars(final HederaFunctionality function, final Timestamp at) { - return monoFeeCalculator.estimatedGasPriceInTinybars(function, at); - } - - @Override - public long estimatedNonFeePayerAdjustments(final TxnAccessor accessor, final Timestamp at) { - return monoFeeCalculator.estimatedNonFeePayerAdjustments(accessor, at); - } - - @Override - public FeeObject computeFee( - @NonNull final TxnAccessor accessor, - @NonNull final JKey payerKey, - @NonNull final 
StateView view, - @NonNull final Instant now) { - if (accessor.getFunction() == ConsensusUpdateTopic) { - final var workingState = workingStateAccessor.getHederaState(); - return topicUpdateFeeGiven( - accessor, payerKey, workingState, usagePrices.activePrices(accessor), exchange.activeRate(now)); - } else { - return monoFeeCalculator.computeFee(accessor, payerKey, view, now); - } - } - - @Override - public FeeObject estimateFee( - @NonNull final TxnAccessor accessor, - @NonNull final JKey payerKey, - @NonNull final StateView view, - @NonNull final Timestamp at) { - if (accessor.getFunction() == ConsensusUpdateTopic) { - try (final var immutableState = stateAccessor.get()) { - // TODO - what if this is null? - final var workingState = immutableState.get(); - return topicUpdateFeeGiven( - accessor, - payerKey, - workingState, - monoFeeCalculator.uncheckedPricesGiven(accessor, at), - exchange.rate(at)); - } - } else { - return monoFeeCalculator.estimateFee(accessor, payerKey, view, at); - } - } - - @Override - public FeeObject estimatePayment( - @NonNull final Query query, - @NonNull final FeeData usagePrices, - @NonNull final StateView view, - @NonNull final Timestamp at, - @NonNull final ResponseType type) { - return monoFeeCalculator.estimatePayment(query, usagePrices, view, at, type); - } - - @Override - public FeeObject computePayment( - @NonNull final Query query, - @NonNull final FeeData usagePrices, - @NonNull final StateView view, - @NonNull final Timestamp at, - @NonNull final Map queryCtx) { - return monoFeeCalculator.computePayment(query, usagePrices, view, at, queryCtx); - } - - @Override - public RenewAssessment assessCryptoAutoRenewal( - @NonNull final HederaAccount expiredAccount, - final long requestedRenewal, - @NonNull final Instant now, - @NonNull final HederaAccount payer) { - return monoFeeCalculator.assessCryptoAutoRenewal(expiredAccount, requestedRenewal, now, payer); - } - - private FeeObject topicUpdateFeeGiven( - final TxnAccessor accessor, - final JKey payerKey, - final HederaState state, - final Map prices, - final ExchangeRate rate) { - final var storeFactory = new ReadableStoreFactory(state); - final var topicStore = storeFactory.getStore(ReadableTopicStore.class); - final var topic = topicStore.getTopicLeaf( - toPbj(accessor.getTxn().getConsensusUpdateTopic().getTopicID())); - try { - final var usage = monoUpdateTopicUsage.usageGivenExplicit( - accessor.getTxn(), - monoFeeCalculator.getSigUsage(accessor, payerKey), - topic.map(MonoGetTopicInfoUsage::monoTopicFrom).orElse(null)); - final var typedPrices = prices.get(accessor.getSubType()); - return monoFeeCalculator.feesIncludingCongestion(usage, typedPrices, accessor, rate); - } catch (final InvalidTxBodyException e) { - throw new IllegalStateException(e); - } - } -} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/FeeAccumulatorImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/FeeAccumulatorImpl.java new file mode 100644 index 000000000000..3269f8a4b83c --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/FeeAccumulatorImpl.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.fees; + +import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.base.Timestamp; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.node.app.hapi.utils.fee.FeeObject; +import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * To be implemented: An implementation of {@link FeeAccumulator}. + */ +@Singleton +public class FeeAccumulatorImpl implements FeeAccumulator { + + @Inject + public FeeAccumulatorImpl() { + // For dagger + } + + @NonNull + @Override + public FeeObject computePayment( + @NonNull ReadableStoreFactory readableStoreFactory, + @NonNull HederaFunctionality functionality, + @NonNull Query query, + @NonNull Timestamp now) { + return new FeeObject(0, 0, 0); + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/FeesInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/FeesInjectionModule.java index 055ba0129efb..a2a4d178bcf6 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/FeesInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/FeesInjectionModule.java @@ -16,7 +16,6 @@ package com.hedera.node.app.fees; -import com.hedera.node.app.service.mono.fees.FeeCalculator; import dagger.Binds; import dagger.Module; import javax.inject.Singleton; @@ -25,9 +24,9 @@ public interface FeesInjectionModule { @Binds @Singleton - FeeCalculator bindFeeCalculator(AdaptedMonoFeeCalculator adaptedMonoFeeCalculator); + FeeAccumulator bindFeeAccumulator(FeeAccumulatorImpl feeAccumulator); @Binds @Singleton - QueryFeeCheck bindQueryFeeCheck(MonoQueryFeeCheck monoQueryFeeCheck); + QueryFeeCheck bindQueryFeeCheck(QueryFeeCheckImpl queryFeeCheck); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoFeeAccumulator.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoFeeAccumulator.java deleted file mode 100644 index e5c9abbcade1..000000000000 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoFeeAccumulator.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.fees; - -import com.hedera.hapi.node.base.HederaFunctionality; -import com.hedera.hapi.node.base.Timestamp; -import com.hedera.hapi.node.transaction.Query; -import com.hedera.node.app.hapi.utils.fee.FeeObject; -import com.hedera.node.app.service.consensus.ReadableTopicStore; -import com.hedera.node.app.service.mono.context.primitives.StateView; -import com.hedera.node.app.service.mono.fees.calculation.UsageBasedFeeCalculator; -import com.hedera.node.app.service.mono.fees.calculation.UsagePricesProvider; -import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; -import com.hedera.node.app.workflows.query.QueryWorkflow; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.HashMap; -import java.util.function.Supplier; -import javax.inject.Inject; -import javax.inject.Singleton; - -/** - * Adapter for {@link UsageBasedFeeCalculator} to be used in {@link QueryWorkflow}. This class is - * currently calling mono-service code and will be replaced with a new implementation as per design. - */ -@Singleton -public class MonoFeeAccumulator implements FeeAccumulator { - private final UsageBasedFeeCalculator feeCalculator; - private final MonoGetTopicInfoUsage getTopicInfoUsage; - private final UsagePricesProvider resourceCosts; - private final Supplier stateView; - - @Inject - public MonoFeeAccumulator( - final UsageBasedFeeCalculator feeCalculator, - final MonoGetTopicInfoUsage getTopicInfoUsage, - final UsagePricesProvider resourceCosts, - final Supplier stateView) { - this.feeCalculator = feeCalculator; - this.getTopicInfoUsage = getTopicInfoUsage; - this.resourceCosts = resourceCosts; - this.stateView = stateView; - } - - /** - * {@inheritDoc} - */ - @Override - @NonNull - public FeeObject computePayment( - @NonNull final ReadableStoreFactory readableStoreFactory, - @NonNull final HederaFunctionality functionality, - @NonNull final Query query, - @NonNull final Timestamp now) { - final var monoFunctionality = PbjConverter.fromPbj(functionality); - final var monoQuery = PbjConverter.fromPbj(query); - final var monoNow = PbjConverter.fromPbj(now); - final var usagePrices = resourceCosts.defaultPricesGiven(monoFunctionality, monoNow); - // Special case here because when running with workflows enabled, the underlying - // states will have PBJ Topic's as keys, not MerkleTopic's; so the mono-service - // resource estimator would hit a ClassCastException - if (functionality == HederaFunctionality.CONSENSUS_GET_TOPIC_INFO) { - final var topicStore = readableStoreFactory.getStore(ReadableTopicStore.class); - final var usage = getTopicInfoUsage.computeUsage(monoQuery, topicStore); - return feeCalculator.computeFromQueryResourceUsage(usage, usagePrices, monoNow); - } - return feeCalculator.computePayment(monoQuery, usagePrices, stateView.get(), monoNow, new HashMap<>()); - } -} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoGetTopicInfoUsage.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoGetTopicInfoUsage.java deleted file mode 100644 index 1948fb965a06..000000000000 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoGetTopicInfoUsage.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.fees; - -import static com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey; -import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbjTopicId; - -import com.hedera.hapi.node.base.Key; -import com.hedera.hapi.node.state.consensus.Topic; -import com.hedera.node.app.service.consensus.ReadableTopicStore; -import com.hedera.node.app.service.mono.fees.calculation.consensus.queries.GetTopicInfoResourceUsage; -import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; -import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.service.mono.state.merkle.MerkleTopic; -import com.hedera.node.app.service.mono.state.submerkle.EntityId; -import com.hedera.node.app.service.mono.state.submerkle.RichInstant; -import com.hedera.node.app.workflows.query.QueryWorkflow; -import com.hederahashgraph.api.proto.java.ConsensusGetTopicInfoQuery; -import com.hederahashgraph.api.proto.java.FeeData; -import com.hederahashgraph.api.proto.java.Query; -import edu.umd.cs.findbugs.annotations.NonNull; -import javax.inject.Inject; -import javax.inject.Singleton; - -/** - * A thin adapter for {@link GetTopicInfoResourceUsage} to be used in {@link QueryWorkflow}. - * It simply looks up the requested topic in the given {@link ReadableTopicStore}, converts - * it to a {@link MerkleTopic}, and delegates to the {@link GetTopicInfoResourceUsage} to - * compute the resource usage. - */ -@Singleton -public class MonoGetTopicInfoUsage { - private final GetTopicInfoResourceUsage delegate; - - @Inject - public MonoGetTopicInfoUsage(final GetTopicInfoResourceUsage delegate) { - this.delegate = delegate; - } - - /** - * Computes the resource usage for a the {@link ConsensusGetTopicInfoQuery} in the - * given top-level {@link Query}, based on the contents of the given - * {@link ReadableTopicStore} and the requested response type in the query header. - * - * @param query the top-level query - * @param topicStore the topic store - * @return the resource usage of the contained topic info query - */ - public FeeData computeUsage(final Query query, final ReadableTopicStore topicStore) { - final var topicInfoQuery = query.getConsensusGetTopicInfo(); - final var topicId = topicInfoQuery.getTopicID(); - final var responseType = topicInfoQuery.getHeader().getResponseType(); - final var maybeTopic = topicStore.getTopicLeaf(toPbjTopicId(topicId)); - return delegate.usageGivenTypeAndTopic( - maybeTopic.map(MonoGetTopicInfoUsage::monoTopicFrom).orElse(null), responseType); - } - - /** - * Converts a PBJ {@link Topic} to a {@link MerkleTopic} for use with the - * {@link GetTopicInfoResourceUsage} delegate. 
- * - * @param topic the PBJ topic - * @return the Merkle topic - */ - public static MerkleTopic monoTopicFrom(@NonNull final Topic topic) { - final MerkleTopic monoTopic = new MerkleTopic( - topic.memo(), - (JKey) fromPbjKey(topic.adminKeyOrElse(Key.DEFAULT)).orElse(null), - (JKey) fromPbjKey(topic.submitKeyOrElse(Key.DEFAULT)).orElse(null), - topic.autoRenewPeriod(), - new EntityId(0, 0, topic.autoRenewAccountNumber()), - new RichInstant(topic.expiry(), 0)); - monoTopic.setRunningHash(PbjConverter.asBytes(topic.runningHash())); - monoTopic.setSequenceNumber(topic.sequenceNumber()); - monoTopic.setDeleted(topic.deleted()); - return monoTopic; - } -} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoQueryFeeCheck.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoQueryFeeCheck.java deleted file mode 100644 index 88d621f109fd..000000000000 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/MonoQueryFeeCheck.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.fees; - -import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; -import static java.util.Objects.requireNonNull; - -import com.hedera.hapi.node.base.AccountAmount; -import com.hedera.hapi.node.base.AccountID; -import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.spi.workflows.InsufficientBalanceException; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.List; -import javax.inject.Inject; - -/** - * Implementation of {@link QueryFeeCheck} that is based on the mono-service implementation. 
- */ -public class MonoQueryFeeCheck implements QueryFeeCheck { - - private final com.hedera.node.app.service.mono.queries.validation.QueryFeeCheck delegate; - - @Inject - public MonoQueryFeeCheck(com.hedera.node.app.service.mono.queries.validation.QueryFeeCheck delegate) { - this.delegate = requireNonNull(delegate, "The supplied argument 'delegate' cannot be null!"); - } - - @Override - public void validateQueryPaymentTransfers(@NonNull final TransactionBody txBody, long queryFee) - throws InsufficientBalanceException { - requireNonNull(txBody, "The supplied argument 'txBody' cannot be null!"); - final var monoTxBody = PbjConverter.fromPbj(txBody); - final var monoResult = delegate.validateQueryPaymentTransfers(monoTxBody); - final var result = PbjConverter.toPbj(monoResult); - if (result != OK) { - throw new InsufficientBalanceException(result, queryFee); - } - } - - @Override - public void nodePaymentValidity( - @NonNull final List transfers, long queryFee, @NonNull final AccountID node) - throws InsufficientBalanceException { - requireNonNull(transfers, "The supplied argument 'transfers' cannot be null!"); - requireNonNull(node, "The supplied argument 'node' cannot be null!"); - final var monoNode = PbjConverter.fromPbj(node); - final var monoTransfers = transfers.stream().map(PbjConverter::fromPbj).toList(); - final var monoResult = delegate.nodePaymentValidity(monoTransfers, queryFee, monoNode); - final var result = PbjConverter.toPbj(monoResult); - if (result != OK) { - throw new InsufficientBalanceException(result, queryFee); - } - } -} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/QueryFeeCheckImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/QueryFeeCheckImpl.java new file mode 100644 index 000000000000..2cf777d7ee2f --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/fees/QueryFeeCheckImpl.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.fees; + +import com.hedera.hapi.node.base.AccountAmount; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.spi.workflows.InsufficientBalanceException; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * A modular implementation of the {@link QueryFeeCheck}. TBD. 
+ */ +@Singleton +public class QueryFeeCheckImpl implements QueryFeeCheck { + + @Inject + public QueryFeeCheckImpl() { + // For dagger + } + + @Override + public void validateQueryPaymentTransfers(@NonNull TransactionBody txBody, long queryFee) + throws InsufficientBalanceException {} + + @Override + public void nodePaymentValidity(@NonNull List transfers, long queryFee, @NonNull AccountID node) + throws InsufficientBalanceException {} +} diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/CurrentPlatformStatus.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/CurrentPlatformStatus.java similarity index 96% rename from hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/CurrentPlatformStatus.java rename to hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/CurrentPlatformStatus.java index 849a0deedc74..35e8fd9276d7 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/CurrentPlatformStatus.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/CurrentPlatformStatus.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.hedera.node.app.spi.info; +package com.hedera.node.app.info; import com.swirlds.common.system.status.PlatformStatus; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoCurrentPlatformStatus.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/CurrentPlatformStatusImpl.java similarity index 50% rename from hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoCurrentPlatformStatus.java rename to hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/CurrentPlatformStatusImpl.java index 2ae5de3f4e05..c0fbaad77cd8 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoCurrentPlatformStatus.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/CurrentPlatformStatusImpl.java @@ -16,33 +16,27 @@ package com.hedera.node.app.info; -import static java.util.Objects.requireNonNull; - -import com.hedera.node.app.spi.info.CurrentPlatformStatus; +import com.swirlds.common.notification.listeners.PlatformStatusChangeListener; +import com.swirlds.common.system.Platform; import com.swirlds.common.system.status.PlatformStatus; import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Singleton; /** - * Implementation of {@link CurrentPlatformStatus} that delegates to the mono-service. + * An implementation of {@link CurrentPlatformStatus} that uses the {@link Platform} to get the current status. */ -public class MonoCurrentPlatformStatus implements CurrentPlatformStatus { - - private final com.hedera.node.app.service.mono.context.CurrentPlatformStatus delegate; +@Singleton +public class CurrentPlatformStatusImpl implements CurrentPlatformStatus { + private PlatformStatus status = PlatformStatus.STARTING_UP; - /** - * Constructs a {@link MonoCurrentPlatformStatus} with the given delegate. 
- * - * @param delegate the delegate - * @throws NullPointerException if {@code delegate} is {@code null} - */ - public MonoCurrentPlatformStatus(@NonNull com.hedera.node.app.service.mono.context.CurrentPlatformStatus delegate) { - this.delegate = requireNonNull(delegate); + public CurrentPlatformStatusImpl(@NonNull final Platform platform) { + platform.getNotificationEngine() + .register(PlatformStatusChangeListener.class, notification -> status = notification.getNewStatus()); } - @Override @NonNull + @Override public PlatformStatus get() { - final var status = delegate.get(); - return status != null ? status : PlatformStatus.STARTING_UP; + return status; } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/InfoInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/InfoInjectionModule.java index 9c678d609e31..931eb13662c0 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/InfoInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/InfoInjectionModule.java @@ -16,34 +16,16 @@ package com.hedera.node.app.info; -import com.hedera.node.app.spi.info.CurrentPlatformStatus; import com.hedera.node.app.spi.info.NetworkInfo; -import com.hedera.node.app.spi.info.NodeInfo; +import dagger.Binds; import dagger.Module; -import dagger.Provides; import edu.umd.cs.findbugs.annotations.NonNull; import javax.inject.Singleton; /** A Dagger module for facilities in the {@link com.hedera.node.app.info} package. */ @Module public interface InfoInjectionModule { - - @Provides - @Singleton - static CurrentPlatformStatus provideCurrentPlatformStatus( - @NonNull final com.hedera.node.app.service.mono.context.CurrentPlatformStatus delegate) { - return new MonoCurrentPlatformStatus(delegate); - } - - @Provides - @Singleton - static NetworkInfo provideNetworkInfo(@NonNull final com.hedera.node.app.service.mono.config.NetworkInfo delegate) { - return new MonoNetworkInfo(delegate); - } - - @Provides + @Binds @Singleton - static NodeInfo provideNodeInfo(@NonNull final com.hedera.node.app.service.mono.context.NodeInfo delegate) { - return new MonoNodeInfo(delegate); - } + NetworkInfo provideNetworkInfo(@NonNull final NetworkInfoImpl impl); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoNetworkInfo.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoNetworkInfo.java deleted file mode 100644 index 5b63b8a597fd..000000000000 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoNetworkInfo.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.info; - -import static java.util.Objects.requireNonNull; - -import com.hedera.node.app.spi.info.NetworkInfo; -import com.hedera.pbj.runtime.io.buffer.Bytes; -import edu.umd.cs.findbugs.annotations.NonNull; - -/** - * Implementation of {@link NetworkInfo} that delegates to the mono-service. - */ -public class MonoNetworkInfo implements NetworkInfo { - - private final com.hedera.node.app.service.mono.config.NetworkInfo delegate; - - /** - * Constructs a {@link MonoNetworkInfo} with the given delegate. - * - * @param delegate the delegate - * @throws NullPointerException if {@code delegate} is {@code null} - */ - public MonoNetworkInfo(@NonNull com.hedera.node.app.service.mono.config.NetworkInfo delegate) { - this.delegate = requireNonNull(delegate); - } - - @Override - @NonNull - public Bytes ledgerId() { - final var ledgerId = delegate.ledgerId(); - return ledgerId != null ? Bytes.wrap(ledgerId.toByteArray()) : Bytes.EMPTY; - } -} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoNodeInfo.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoNodeInfo.java deleted file mode 100644 index fbb8c987b3b8..000000000000 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/MonoNodeInfo.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.info; - -import static java.util.Objects.requireNonNull; - -import com.hedera.hapi.node.base.AccountID; -import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.spi.info.NodeInfo; -import edu.umd.cs.findbugs.annotations.NonNull; - -/** - * Implementation of {@link NodeInfo} that delegates to the mono-service. - */ -public class MonoNodeInfo implements NodeInfo { - - private final com.hedera.node.app.service.mono.context.NodeInfo delegate; - - /** - * Constructs a {@link MonoNodeInfo} with the given delegate. - * - * @param delegate the delegate - * @throws NullPointerException if {@code delegate} is {@code null} - */ - public MonoNodeInfo(@NonNull com.hedera.node.app.service.mono.context.NodeInfo delegate) { - this.delegate = requireNonNull(delegate); - } - - @Override - public boolean isSelfZeroStake() { - return delegate.isSelfZeroStake(); - } - - @Override - public AccountID accountOf(final long nodeId) { - return PbjConverter.toPbj(delegate.accountOf(nodeId)); - } -} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/NetworkInfoImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/NetworkInfoImpl.java new file mode 100644 index 000000000000..05917394f64d --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/NetworkInfoImpl.java @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.info; + +import static java.util.Objects.requireNonNull; + +import com.hedera.node.app.spi.info.NetworkInfo; +import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.LedgerConfig; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.Platform; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; +import javax.inject.Inject; +import javax.inject.Singleton; + +@Singleton +public class NetworkInfoImpl implements NetworkInfo { + private final Bytes ledgerId; + private final NodeId selfId; + private final Platform platform; + + @Inject + public NetworkInfoImpl( + @NonNull final NodeId selfNodeId, + @NonNull final Platform platform, + @NonNull final ConfigProvider configProvider) { + // Load the ledger ID from configuration + final var config = configProvider.getConfiguration(); + final var ledgerConfig = config.getConfigData(LedgerConfig.class); + ledgerId = ledgerConfig.id(); + + // Save the platform for looking up the address book later + this.platform = requireNonNull(platform); + + // The node ID of **this** node within the address book + this.selfId = requireNonNull(selfNodeId); + if (platform.getAddressBook().getAddress(selfNodeId) == null) { + throw new IllegalArgumentException("Node ID " + this.selfId + " is not in the address book"); + } + } + + @NonNull + @Override + public Bytes ledgerId() { + return ledgerId; + } + + @NonNull + @Override + public NodeInfo selfNodeInfo() { + final var self = nodeInfo(selfId); + if (self == null) throw new IllegalStateException("Self Node ID " + selfId + " is not in the address book!!"); + return self; + } + + @NonNull + @Override + public List addressBook() { + final var platformAddressBook = platform.getAddressBook(); + return StreamSupport.stream(platformAddressBook.spliterator(), false) + .map(NodeInfoImpl::fromAddress) + .collect(Collectors.toList()); + } + + @Nullable + @Override + public NodeInfo nodeInfo(long nodeId) { + return nodeInfo(new NodeId(nodeId)); + } + + @Nullable + private NodeInfo nodeInfo(@NonNull final NodeId nodeId) { + final var platformAddressBook = platform.getAddressBook(); + if (platformAddressBook == null) return null; + + final var address = platformAddressBook.getAddress(nodeId); + return address == null ? null : NodeInfoImpl.fromAddress(address); + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/NodeInfoImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/NodeInfoImpl.java new file mode 100644 index 000000000000..7b2a59729702 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/info/NodeInfoImpl.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.info; + +import static com.hedera.node.app.spi.HapiUtils.parseAccount; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.node.app.spi.info.NodeInfo; +import com.swirlds.common.system.address.Address; +import edu.umd.cs.findbugs.annotations.NonNull; + +public record NodeInfoImpl(AccountID accountId, boolean zeroStake) implements NodeInfo { + @NonNull + static NodeInfo fromAddress(@NonNull final Address address) { + return new NodeInfoImpl(parseAccount(address.getMemo()), address.getWeight() <= 0); + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/meta/MonoHandleContext.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/meta/MonoHandleContext.java index 03d50be54c5c..f3006d88ebf7 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/meta/MonoHandleContext.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/meta/MonoHandleContext.java @@ -16,6 +16,7 @@ package com.hedera.node.app.meta; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.records.SingleTransactionRecordBuilder; @@ -86,6 +87,12 @@ public TransactionBody body() { return txBody; } + @NonNull + @Override + public AccountID payer() { + throw new UnsupportedOperationException("Not implemented yet"); + } + /** * {@inheritDoc} */ @@ -95,6 +102,12 @@ public Configuration configuration() { throw new UnsupportedOperationException("Not implemented yet"); } + @Nullable + @Override + public Key payerKey() { + return null; + } + /** * {@inheritDoc} */ @@ -125,13 +138,13 @@ public ExpiryValidator expiryValidator() { * {@inheritDoc} */ @Override - @Nullable + @NonNull public SignatureVerification verificationFor(@NonNull Key key) { throw new UnsupportedOperationException("Not implemented yet"); } @Override - @Nullable + @NonNull public SignatureVerification verificationFor(@NonNull Bytes evmAlias) { throw new UnsupportedOperationException("Not yet implemented"); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/RecordManager.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/RecordManager.java index f102452e78a0..aa57bec664e4 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/RecordManager.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/RecordManager.java @@ -20,6 +20,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; import java.util.stream.Stream; +import javax.inject.Inject; import javax.inject.Singleton; /** @@ -34,6 +35,10 @@ */ @Singleton public class RecordManager { + + @Inject + public RecordManager() {} + /** * Inform BlockRecordManager of the new consensus time at the beginning of new transaction. This should only be called for before user * transactions where the workflow knows 100% that any there will be no new transaction records for any consensus time prior to this one. 
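The recurring pattern in this change set (InfoInjectionModule, SolvencyInjectionModule, ThrottleInjectionModule, HederaStateInjectionModule) swaps hand-written @Provides factories that wrapped mono-service delegates for single @Binds declarations against new *Impl classes that carry their own @Inject constructors. The following minimal sketch is not part of the patch; it only illustrates that wiring in isolation, assuming nothing beyond standard Dagger 2 and javax.inject, and the Greeter* names are placeholders rather than anything in the Hedera codebase.

import dagger.Binds;
import dagger.Component;
import dagger.Module;
import javax.inject.Inject;
import javax.inject.Singleton;

/** The service interface, standing in here for NetworkInfo, SolvencyPreCheck, ThrottleAccumulator, etc. */
interface Greeter {
    String greet(String name);
}

/** The modular implementation; like QueryFeeCheckImpl it only needs a no-arg @Inject constructor. */
@Singleton
final class GreeterImpl implements Greeter {
    @Inject
    GreeterImpl() {
        // For dagger
    }

    @Override
    public String greet(final String name) {
        return "Hello, " + name;
    }
}

/** The module: one abstract @Binds method replaces a hand-written @Provides factory around a delegate. */
@Module
interface GreeterModule {
    @Binds
    @Singleton
    Greeter bindGreeter(GreeterImpl impl);
}

@Singleton
@Component(modules = GreeterModule.class)
interface GreeterComponent {
    Greeter greeter();
}

final class GreeterDemo {
    public static void main(final String[] args) {
        // DaggerGreeterComponent is generated by the Dagger annotation processor at build time.
        System.out.println(DaggerGreeterComponent.create().greeter().greet("Hedera"));
    }
}

With @Binds, Dagger invokes the implementation's @Inject constructor directly and generates less wrapper code than an equivalent @Provides method, which is why the modules in this change shrink to single abstract methods once the mono-service delegates are gone.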
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java index 1ad00671adc0..590f4cd801c8 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java @@ -288,7 +288,9 @@ public OneOf entropy() { return entropy; } - public SingleTransactionRecordBuilder evmAddress(Bytes evmAddress) { + @Override + @NonNull + public SingleTransactionRecordBuilder evmAddress(@NonNull Bytes evmAddress) { this.evmAddress = evmAddress; return this; } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/signature/SignatureExpander.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/signature/SignatureExpander.java index 715cc2fa3b9a..d3f3b27ea482 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/signature/SignatureExpander.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/signature/SignatureExpander.java @@ -65,4 +65,21 @@ public interface SignatureExpander { * @param expanded Will be populated with all created {@link ExpandedSignaturePair}s */ void expand(@NonNull Key key, @NonNull List sigPairs, @NonNull Set expanded); + + /** + * Expands all {@link Key}s within an {@link Iterable}. + * + * @param keys The {@link Iterable} of keys + * @param sigPairs The {@link SignaturePair}s to search for full key prefixes. This list must be pre-filtered such + * that there are no duplicate entries and one prefix is not the prefix of another. + * @param expanded Will be populated with all created {@link ExpandedSignaturePair}s + */ + default void expand( + @NonNull Iterable keys, + @NonNull List sigPairs, + @NonNull Set expanded) { + for (final var key : keys) { + expand(key, sigPairs, expanded); + } + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/solvency/SolvencyInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/solvency/SolvencyInjectionModule.java index f54375d6dbe8..124fe525f3a9 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/solvency/SolvencyInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/solvency/SolvencyInjectionModule.java @@ -24,5 +24,5 @@ public interface SolvencyInjectionModule { @Binds @Singleton - SolvencyPreCheck bindSolvencyPreCheck(MonoSolvencyPreCheck solvencyPreCheck); + SolvencyPreCheck bindSolvencyPreCheck(SolvencyPreCheckImpl solvencyPreCheck); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/solvency/SolvencyPreCheckImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/solvency/SolvencyPreCheckImpl.java new file mode 100644 index 000000000000..89372237f075 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/solvency/SolvencyPreCheckImpl.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.solvency; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Transaction; +import com.hedera.node.app.spi.workflows.InsufficientBalanceException; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.state.HederaState; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * A modular implementation of {@link SolvencyPreCheck}. TBD + */ +@Singleton +public class SolvencyPreCheckImpl implements SolvencyPreCheck { + + @Inject + public SolvencyPreCheckImpl() { + // For dagger + } + + @Override + public void checkPayerAccountStatus(@NonNull HederaState state, @NonNull AccountID accountID) + throws PreCheckException {} + + @Override + public void checkSolvencyOfVerifiedPayer(@NonNull HederaState state, @NonNull Transaction transaction) + throws InsufficientBalanceException {} + + @Override + public void assessWithSvcFees(@NonNull Transaction transaction) throws PreCheckException {} +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/HederaStateInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/HederaStateInjectionModule.java index 0afdfdd0137b..d69708e94034 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/HederaStateInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/HederaStateInjectionModule.java @@ -27,10 +27,13 @@ @Module public interface HederaStateInjectionModule { @Binds - abstract RecordCache provideRecordCache(RecordCacheImpl cache); + LedgerValidator provideLedgerValidator(LedgerValidatorImpl impl); @Binds - abstract DeduplicationCache provideDeduplicationCache(DeduplicationCacheImpl cache); + RecordCache provideRecordCache(RecordCacheImpl cache); + + @Binds + DeduplicationCache provideDeduplicationCache(DeduplicationCacheImpl cache); @Provides @Singleton diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/LedgerValidator.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/LedgerValidator.java new file mode 100644 index 000000000000..cb86abc912dc --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/LedgerValidator.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.state; + +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * Validates a {@link HederaState}, checking to make sure the state is valid. This is used, for example, to verify that + * no HBAR were lost during an upgrade. Implementations should execute quickly, because validation will delay restarts + * and/or upgrades. Most validation will happen asynchronously. At the very least, validation should verify that no + * HBARs were lost or gained. + */ +public interface LedgerValidator { + /** + * Performs some kind of validation on the {@link HederaState}. + * + * @param state The state to check + * @throws IllegalStateException If the state is invalid. + */ + void validate(@NonNull HederaState state) throws IllegalStateException; +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/LedgerValidatorImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/LedgerValidatorImpl.java new file mode 100644 index 000000000000..054a98b4b8d8 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/LedgerValidatorImpl.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.state; + +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.node.app.service.token.TokenService; +import com.hedera.node.app.service.token.impl.TokenServiceImpl; +import com.hedera.node.app.spi.HapiUtils; +import com.hedera.node.app.spi.state.ReadableKVState; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.LedgerConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.concurrent.atomic.AtomicLong; +import javax.inject.Inject; + +public final class LedgerValidatorImpl implements LedgerValidator { + private final ConfigProvider configProvider; + + @Inject + public LedgerValidatorImpl(@NonNull final ConfigProvider configProvider) { + this.configProvider = requireNonNull(configProvider); + } + + @Override + public void validate(@NonNull final HederaState state) throws IllegalStateException { + final var config = configProvider.getConfiguration().getConfigData(LedgerConfig.class); + final var expectedTotalTinyBar = config.totalTinyBarFloat(); + final var tokenStates = state.createReadableStates(TokenService.NAME); + final ReadableKVState accounts = tokenStates.get(TokenServiceImpl.ACCOUNTS_KEY); + final var total = new AtomicLong(0L); + + // FUTURE: This would be more efficient if we got the values instead of keys. We also should look at returning + // a stream instead, so we can parallelize it. This would be much faster when reading from disk. 
+ accounts.keys().forEachRemaining(accountId -> { + if (accountId.accountNumOrElse(0L) < 1) { + throw new IllegalStateException("Invalid account id " + HapiUtils.toString(accountId)); + } + final var account = accounts.get(accountId); + if (account == null) { + throw new IllegalStateException("Missing account " + HapiUtils.toString(accountId)); + } + total.addAndGet(account.tinybarBalance()); + }); + + if (total.get() != expectedTotalTinyBar) { + throw new IllegalStateException( + "Wrong ℏ total, expected " + expectedTotalTinyBar + " but was " + total.get()); + } + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleHederaState.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleHederaState.java index 7fb10b4198fa..6b4f27d5b93d 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleHederaState.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleHederaState.java @@ -17,51 +17,7 @@ package com.hedera.node.app.state.merkle; import com.google.protobuf.ByteString; -import com.hedera.node.app.service.consensus.ConsensusService; -import com.hedera.node.app.service.consensus.impl.ConsensusServiceImpl; -import com.hedera.node.app.service.contract.ContractService; -import com.hedera.node.app.service.contract.impl.state.ContractSchema; -import com.hedera.node.app.service.file.FileService; -import com.hedera.node.app.service.file.impl.FileServiceImpl; -import com.hedera.node.app.service.mono.context.StateChildrenProvider; -import com.hedera.node.app.service.mono.state.adapters.MerkleMapLike; -import com.hedera.node.app.service.mono.state.adapters.VirtualMapLike; -import com.hedera.node.app.service.mono.state.logic.ScheduledTransactions; -import com.hedera.node.app.service.mono.state.merkle.MerkleNetworkContext; -import com.hedera.node.app.service.mono.state.merkle.MerklePayerRecords; -import com.hedera.node.app.service.mono.state.merkle.MerkleScheduledTransactionsState; -import com.hedera.node.app.service.mono.state.merkle.MerkleSpecialFiles; -import com.hedera.node.app.service.mono.state.merkle.MerkleStakingInfo; -import com.hedera.node.app.service.mono.state.merkle.MerkleToken; -import com.hedera.node.app.service.mono.state.merkle.MerkleTopic; -import com.hedera.node.app.service.mono.state.migration.AccountStorageAdapter; -import com.hedera.node.app.service.mono.state.migration.RecordsStorageAdapter; -import com.hedera.node.app.service.mono.state.migration.TokenRelStorageAdapter; -import com.hedera.node.app.service.mono.state.migration.UniqueTokenMapAdapter; -import com.hedera.node.app.service.mono.state.virtual.ContractKey; -import com.hedera.node.app.service.mono.state.virtual.EntityNumVirtualKey; -import com.hedera.node.app.service.mono.state.virtual.IterableContractValue; -import com.hedera.node.app.service.mono.state.virtual.UniqueTokenKey; -import com.hedera.node.app.service.mono.state.virtual.UniqueTokenValue; -import com.hedera.node.app.service.mono.state.virtual.VirtualBlobKey; -import com.hedera.node.app.service.mono.state.virtual.VirtualBlobValue; -import com.hedera.node.app.service.mono.state.virtual.entities.OnDiskAccount; -import com.hedera.node.app.service.mono.state.virtual.entities.OnDiskTokenRel; -import com.hedera.node.app.service.mono.state.virtual.schedule.ScheduleEqualityVirtualKey; -import com.hedera.node.app.service.mono.state.virtual.schedule.ScheduleEqualityVirtualValue; -import 
com.hedera.node.app.service.mono.state.virtual.schedule.ScheduleSecondVirtualValue; -import com.hedera.node.app.service.mono.state.virtual.schedule.ScheduleVirtualValue; -import com.hedera.node.app.service.mono.state.virtual.temporal.SecondSinceEpocVirtualKey; -import com.hedera.node.app.service.mono.stream.RecordsRunningHashLeaf; import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.networkadmin.FreezeService; -import com.hedera.node.app.service.networkadmin.NetworkService; -import com.hedera.node.app.service.networkadmin.impl.FreezeServiceImpl; -import com.hedera.node.app.service.networkadmin.impl.NetworkServiceImpl; -import com.hedera.node.app.service.schedule.ScheduleService; -import com.hedera.node.app.service.schedule.impl.ScheduleServiceImpl; -import com.hedera.node.app.service.token.TokenService; -import com.hedera.node.app.service.token.impl.TokenServiceImpl; import com.hedera.node.app.spi.state.EmptyReadableStates; import com.hedera.node.app.spi.state.EmptyWritableStates; import com.hedera.node.app.spi.state.ReadableKVState; @@ -78,9 +34,6 @@ import com.hedera.node.app.state.HandleConsensusRoundListener; import com.hedera.node.app.state.HederaState; import com.hedera.node.app.state.PreHandleListener; -import com.hedera.node.app.state.merkle.adapters.MerkleMapLikeAdapter; -import com.hedera.node.app.state.merkle.adapters.ScheduledTransactionsAdapter; -import com.hedera.node.app.state.merkle.adapters.VirtualMapLikeAdapter; import com.hedera.node.app.state.merkle.disk.OnDiskReadableKVState; import com.hedera.node.app.state.merkle.disk.OnDiskWritableKVState; import com.hedera.node.app.state.merkle.memory.InMemoryReadableKVState; @@ -100,14 +53,12 @@ import com.swirlds.common.system.SoftwareVersion; import com.swirlds.common.system.SwirldDualState; import com.swirlds.common.system.SwirldState; -import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.events.Event; import com.swirlds.common.utility.Labeled; import com.swirlds.fchashmap.FCHashMap; import com.swirlds.merkle.map.MerkleMap; import com.swirlds.virtualmap.VirtualMap; import edu.umd.cs.findbugs.annotations.NonNull; -import java.time.Instant; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -711,176 +662,4 @@ public void commit() { } } } - - /** - * This method, along with {@link StateChildrenProvider}, is a temporary bridge for the - * mono-service. It is defined here and not in the "mono" package because it needs access - * to the raw merkle tree nodes and state metadata. I could move it out of here if I create - * public API on MerkleHederaState for access to the raw tree bits. I'm not sure if that is - * better than having this here. 
- */ - @Deprecated(forRemoval = true) - public StateChildrenProvider getStateChildrenProvider(@NonNull final Platform platform) { - return new StateChildrenProvider() { - @Override - @SuppressWarnings("unchecked") - public AccountStorageAdapter accounts() { - return AccountStorageAdapter.fromOnDisk(VirtualMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(TokenService.NAME).get("ACCOUNTS"), - getChild(findNodeIndex(TokenService.NAME, "ACCOUNTS")))); - } - - @SuppressWarnings("unchecked") - private MerkleMapLike mapLikePayerRecords() { - return MerkleMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(TokenService.NAME).get(TokenServiceImpl.PAYER_RECORDS_KEY), - getChild(findNodeIndex(TokenService.NAME, TokenServiceImpl.PAYER_RECORDS_KEY))); - } - - @Override - @SuppressWarnings("unchecked") - public MerkleMapLike topics() { - return MerkleMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(ConsensusService.NAME).get(ConsensusServiceImpl.TOPICS_KEY), - getChild(findNodeIndex(ConsensusService.NAME, ConsensusServiceImpl.TOPICS_KEY))); - } - - @Override - @SuppressWarnings("unchecked") - public VirtualMapLike storage() { - return VirtualMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(FileService.NAME).get(FileServiceImpl.BLOBS_KEY), - getChild(findNodeIndex(FileService.NAME, FileServiceImpl.BLOBS_KEY))); - } - - @Override - @SuppressWarnings("unchecked") - public VirtualMapLike contractStorage() { - return VirtualMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(ContractService.NAME).get(ContractSchema.STORAGE_KEY), - getChild(findNodeIndex(ContractService.NAME, ContractSchema.STORAGE_KEY))); - } - - @Override - @SuppressWarnings("unchecked") - public MerkleMapLike tokens() { - return MerkleMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(TokenService.NAME).get(TokenServiceImpl.TOKENS_KEY), - getChild(findNodeIndex(TokenService.NAME, TokenServiceImpl.TOKENS_KEY))); - } - - @Override - @SuppressWarnings("unchecked") - public TokenRelStorageAdapter tokenAssociations() { - return TokenRelStorageAdapter.fromOnDisk(VirtualMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(TokenService.NAME).get(TokenServiceImpl.TOKEN_RELS_KEY), - getChild(findNodeIndex(TokenService.NAME, TokenServiceImpl.TOKEN_RELS_KEY)))); - } - - @Override - @SuppressWarnings("unchecked") - public ScheduledTransactions scheduleTxs() { - return new ScheduledTransactionsAdapter( - ((SingletonNode) getChild( - findNodeIndex(ScheduleService.NAME, ScheduleServiceImpl.SCHEDULING_STATE_KEY))) - .getValue(), - MerkleMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(ScheduleService.NAME).get(ScheduleServiceImpl.SCHEDULES_BY_ID_KEY), - getChild(findNodeIndex(ScheduleService.NAME, ScheduleServiceImpl.SCHEDULES_BY_ID_KEY))), - MerkleMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(ScheduleService.NAME) - .get(ScheduleServiceImpl.SCHEDULES_BY_EXPIRY_SEC_KEY), - getChild(findNodeIndex( - ScheduleService.NAME, ScheduleServiceImpl.SCHEDULES_BY_EXPIRY_SEC_KEY))), - MerkleMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(ScheduleService.NAME) - .get(ScheduleServiceImpl.SCHEDULES_BY_EQUALITY_KEY), - getChild(findNodeIndex( - ScheduleService.NAME, ScheduleServiceImpl.SCHEDULES_BY_EQUALITY_KEY)))); - } - - @Override - @SuppressWarnings("unchecked") - public MerkleNetworkContext networkCtx() { - return ((SingletonNode) - getChild(findNodeIndex(NetworkService.NAME, NetworkServiceImpl.CONTEXT_KEY))) - .getValue(); - } - - @Override - public AddressBook 
addressBook() { - return Objects.requireNonNull(platform).getAddressBook(); - } - - @Override - @SuppressWarnings("unchecked") - public MerkleSpecialFiles specialFiles() { - return ((SingletonNode) - getChild(findNodeIndex(FreezeService.NAME, FreezeServiceImpl.UPGRADE_FILES_KEY))) - .getValue(); - } - - @Override - @SuppressWarnings("unchecked") - public UniqueTokenMapAdapter uniqueTokens() { - return UniqueTokenMapAdapter.wrap(VirtualMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(TokenService.NAME).get(TokenServiceImpl.NFTS_KEY), - getChild(findNodeIndex(TokenService.NAME, TokenServiceImpl.NFTS_KEY)))); - } - - @Override - public RecordsStorageAdapter payerRecords() { - return RecordsStorageAdapter.fromDedicated(mapLikePayerRecords()); - } - - @Override - @SuppressWarnings("unchecked") - public RecordsRunningHashLeaf runningHashLeaf() { - return ((SingletonNode) - getChild(findNodeIndex(NetworkService.NAME, NetworkServiceImpl.RUNNING_HASHES_KEY))) - .getValue(); - } - - @Override - public Map aliases() { - Objects.requireNonNull(aliases, "Cannot get aliases from an uninitialized state"); - return aliases; - } - - @Override - @SuppressWarnings("unchecked") - public MerkleMapLike stakingInfo() { - return MerkleMapLikeAdapter.unwrapping( - (StateMetadata) - services.get(NetworkService.NAME).get(NetworkServiceImpl.STAKING_KEY), - getChild(findNodeIndex(NetworkService.NAME, NetworkServiceImpl.STAKING_KEY))); - } - - @Override - public boolean isInitialized() { - return true; - } - - @Override - public Instant getTimeOfLastHandledTxn() { - return networkCtx().consensusTimeOfLastHandledTxn(); - } - - @Override - public int getStateVersion() { - return networkCtx().getStateVersion(); - } - }; - } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/StateUtils.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/StateUtils.java index 5fe124b2d615..5e504088dc19 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/StateUtils.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/StateUtils.java @@ -35,6 +35,17 @@ public final class StateUtils { /** Prevent instantiation */ private StateUtils() {} + /** + * Write the {@code object} to the {@link OutputStream} using the given {@link Codec}. + * + * @param out The object to write out + * @param codec The codec to use. MUST be compatible with the {@code object} type + * @param object The object to write + * @return The number of bytes written to the stream. + * @param The type of the object and associated codec. + * @throws IOException If the output stream throws it. + * @throws ClassCastException If the object or codec is not for type {@code T}. + */ public static int writeToStream( @NonNull final OutputStream out, @NonNull final Codec codec, @NonNull final T object) throws IOException { @@ -47,6 +58,16 @@ public static int writeToStream( return byteStream.size(); } + /** + * Read an object from the {@link InputStream} using the given {@link Codec}. + * + * @param in The input stream to read from + * @param codec The codec to use. MUST be compatible with the {@code object} type + * @return The object read from the stream + * @param The type of the object and associated codec. + * @throws IOException If the input stream throws it. + * @throws ClassCastException If the object or codec is not for type {@code T}. 
+ */ @NonNull public static T readFromStream(@NonNull final InputStream in, @NonNull final Codec codec) throws IOException { diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/adapters/VirtualMapLikeAdapter.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/adapters/VirtualMapLikeAdapter.java index e386579fa5de..76eb5576ef83 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/adapters/VirtualMapLikeAdapter.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/adapters/VirtualMapLikeAdapter.java @@ -83,6 +83,20 @@ public void accept(final Pair, OnDiskValue> pair) throws Interru VirtualMapMigration.extractVirtualMapData(threadManager, real, unwrappingHandler, threadCount); } + @Override + public void extractVirtualMapDataC( + final ThreadManager threadManager, + final InterruptableConsumer> handler, + final int threadCount) + throws InterruptedException { + VirtualMapMigration.extractVirtualMapDataC( + threadManager, + real, + pair -> handler.accept( + Pair.of(pair.getKey().getKey(), pair.getValue().getValue())), + threadCount); + } + @Override public void registerMetrics(final Metrics metrics) { real.registerMetrics(metrics); diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/disk/OnDiskReadableKVState.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/disk/OnDiskReadableKVState.java index e3fdd7468bb7..5941d6741808 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/disk/OnDiskReadableKVState.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/disk/OnDiskReadableKVState.java @@ -20,8 +20,10 @@ import com.hedera.node.app.spi.state.ReadableKVStateBase; import com.hedera.node.app.state.merkle.StateMetadata; import com.swirlds.virtualmap.VirtualMap; +import com.swirlds.virtualmap.internal.merkle.VirtualLeafNode; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Iterator; +import java.util.NoSuchElementException; import java.util.Objects; /** @@ -62,11 +64,40 @@ protected V readFromDataSource(@NonNull K key) { @NonNull @Override protected Iterator iterateFromDataSource() { - throw new UnsupportedOperationException("You cannot iterate over a virtual map's keys!"); + final var itr = virtualMap.treeIterator(); + return new Iterator<>() { + private K next = null; + + @Override + public boolean hasNext() { + if (next != null) return true; + while (itr.hasNext()) { + final var merkleNode = itr.next(); + if (merkleNode instanceof VirtualLeafNode leaf) { + final var k = leaf.getKey(); + if (k instanceof OnDiskKey onDiskKey) { + this.next = (K) onDiskKey.getKey(); + return true; + } + } + } + return false; + } + + @Override + public K next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + + final var k = next; + next = null; + return k; + } + }; } /** {@inheritDoc} */ - @NonNull @Override public long size() { return virtualMap.size(); diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java index 8b3e24f62cde..1cb7995475a8 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java @@ -30,6 +30,7 @@ import 
com.hedera.hapi.node.state.recordcache.TransactionRecordEntry; import com.hedera.hapi.node.transaction.TransactionReceipt; import com.hedera.hapi.node.transaction.TransactionRecord; +import com.hedera.node.app.spi.state.ReadableQueueState; import com.hedera.node.app.spi.state.WritableQueueState; import com.hedera.node.app.state.DeduplicationCache; import com.hedera.node.app.state.HederaRecordCache; @@ -75,7 +76,6 @@ public class RecordCacheImpl implements HederaRecordCache { * @param records Every {@link TransactionRecord} handled for every transaction that came to consensus with the txId */ private record History(@NonNull Set nodeIds, @NonNull List records) { - History() { this(new HashSet<>(), new ArrayList<>()); } @@ -121,7 +121,7 @@ public void rebuild() { payerToTransactionIndex.clear(); deduplicationCache.clear(); - final var queue = getQueue(); + final var queue = getReadableQueue(); final var itr = queue.iterator(); while (itr.hasNext()) { final var entry = itr.next(); @@ -258,7 +258,7 @@ public List getReceipts(@NonNull final TransactionID transac : records.stream().map(TransactionRecord::receipt).toList(); } - /** Utility method that get the queue from the working state */ + /** Utility method that get the writable queue from the working state */ private WritableQueueState getQueue() { final var hederaState = workingStateAccessor.getHederaState(); if (hederaState == null) { @@ -267,4 +267,14 @@ private WritableQueueState getQueue() { final var states = hederaState.createWritableStates(NAME); return states.getQueue(TXN_RECORD_QUEUE); } + + /** Utility method that get the readable queue from the working state */ + private ReadableQueueState getReadableQueue() { + final var hederaState = workingStateAccessor.getHederaState(); + if (hederaState == null) { + throw new RuntimeException("HederaState is null. This can only happen very early during bootstrapping"); + } + final var states = hederaState.createReadableStates(NAME); + return states.getQueue(TXN_RECORD_QUEUE); + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheService.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheService.java index d133ba4323b3..6a966ed540ad 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheService.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheService.java @@ -36,6 +36,8 @@ public class RecordCacheService implements Service { /** The name of the queue that stores the transaction records */ static final String TXN_RECORD_QUEUE = "TransactionRecordQueue"; + private static final SemanticVersion GENESIS_VERSION = SemanticVersion.DEFAULT; + /** {@inheritDoc} */ @NonNull @Override @@ -48,7 +50,7 @@ public String getServiceName() { public void registerSchemas(@NonNull SchemaRegistry registry) { // This is the genesis schema for this service, and simply creates the queue state that stores the // transaction records. 
- registry.register(new Schema(SemanticVersion.newBuilder().minor(38).build()) { + registry.register(new Schema(GENESIS_VERSION) { @NonNull @Override public SemanticVersion getVersion() { diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/throttle/ThrottleAccumulatorImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/throttle/ThrottleAccumulatorImpl.java new file mode 100644 index 000000000000..bf9df3ad50ce --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/throttle/ThrottleAccumulatorImpl.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.throttle; + +import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.TransactionBody; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * A modular implementation of the {@link ThrottleAccumulator}. TBD. + */ +@Singleton +public class ThrottleAccumulatorImpl implements ThrottleAccumulator { + + @Inject + public ThrottleAccumulatorImpl() { + // Needed for dagger DI + } + + @Override + public boolean shouldThrottle(@NonNull TransactionBody txn) { + return false; + } + + @Override + public boolean shouldThrottleQuery(@NonNull HederaFunctionality functionality, @NonNull Query query) { + return false; + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/throttle/ThrottleInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/throttle/ThrottleInjectionModule.java index 01ab42df60a6..8b5fcea2af12 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/throttle/ThrottleInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/throttle/ThrottleInjectionModule.java @@ -24,5 +24,5 @@ public interface ThrottleInjectionModule { @Binds @Singleton - ThrottleAccumulator bindThrottleAccumulator(MonoThrottleAccumulator throttleAccumulator); + ThrottleAccumulator bindThrottleAccumulator(ThrottleAccumulatorImpl throttleAccumulator); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/version/HederaSoftwareVersion.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/version/HederaSoftwareVersion.java index 29f1d1752987..7c36e29cf98f 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/version/HederaSoftwareVersion.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/version/HederaSoftwareVersion.java @@ -23,6 +23,7 @@ import com.swirlds.common.io.streams.SerializableDataOutputStream; import com.swirlds.common.system.SoftwareVersion; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.io.IOException; /** @@ -130,4 +131,18 @@ private static void serializeIfUsed(final String semVerPart, final SerializableD out.writeNormalisedString(semVerPart); } } 
+ + public boolean isAfter(@Nullable final SoftwareVersion deserializedVersion) { + if (deserializedVersion == null) { + return true; + } + return compareTo(deserializedVersion) > 0; + } + + public boolean isBefore(@Nullable final SoftwareVersion deserializedVersion) { + if (deserializedVersion == null) { + return false; + } + return compareTo(deserializedVersion) < 0; + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java index 256950db83ef..6f2987903469 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java @@ -51,6 +51,8 @@ import com.hedera.node.app.spi.HapiUtils; import com.hedera.node.app.spi.UnknownHederaFunctionality; import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.HederaConfig; import com.hedera.pbj.runtime.Codec; import com.hedera.pbj.runtime.MalformedProtobufException; import com.hedera.pbj.runtime.UnknownFieldException; @@ -97,8 +99,8 @@ public class TransactionChecker { /** The maximum number of bytes that can exist in the transaction */ private final int maxSignedTxnSize; - /** The {@link GlobalDynamicProperties} used to get properties needed for these checks. */ - private final GlobalDynamicProperties props; + /** The {@link ConfigProvider} used to get properties needed for these checks. */ + private final ConfigProvider props; /** The {@link Counter} used to track the number of deprecated transactions (bodyBytes, sigMap) received. */ private final Counter deprecatedCounter; /** The {@link Counter} used to track the number of super deprecated transactions (body, sigs) received. 
*/ @@ -114,7 +116,7 @@ public class TransactionChecker { * Create a new {@link TransactionChecker} * * @param maxSignedTxnSize the maximum transaction size - * @param dynamicProperties the {@link GlobalDynamicProperties} + * @param configProvider access to configuration * @param metrics metrics related to workflows * @throws NullPointerException if one of the arguments is {@code null} * @throws IllegalArgumentException if {@code maxSignedTxnSize} is not positive @@ -123,7 +125,7 @@ public class TransactionChecker { public TransactionChecker( @MaxSignedTxnSize final int maxSignedTxnSize, @NodeSelfId @NonNull final AccountID nodeAccount, - @NonNull final GlobalDynamicProperties dynamicProperties, + @NonNull final ConfigProvider configProvider, @NonNull final Metrics metrics) { if (maxSignedTxnSize <= 0) { throw new IllegalArgumentException("maxSignedTxnSize must be > 0"); @@ -131,7 +133,7 @@ public TransactionChecker( this.nodeAccount = requireNonNull(nodeAccount); this.maxSignedTxnSize = maxSignedTxnSize; - this.props = requireNonNull(dynamicProperties); + this.props = requireNonNull(configProvider); this.deprecatedCounter = metrics.getOrCreate(new Counter.Config("app", COUNTER_DEPRECATED_TXNS_NAME) .withDescription(COUNTER_RECEIVED_DEPRECATED_DESC)); this.superDeprecatedCounter = metrics.getOrCreate(new Counter.Config("app", COUNTER_SUPER_DEPRECATED_TXNS_NAME) @@ -314,8 +316,9 @@ public void checkTransactionBody(@NonNull final TransactionBody txBody) throws P throw new PreCheckException(INVALID_NODE_ACCOUNT); } + final var config = props.getConfiguration().getConfigData(HederaConfig.class); checkTransactionID(txBody.transactionID()); - checkMemo(txBody.memo()); + checkMemo(txBody.memo(), config.transactionMaxMemoUtf8Bytes()); // You cannot have a negative transaction fee!! We're not paying you, buddy. if (txBody.transactionFee() < 0) { @@ -324,7 +327,10 @@ public void checkTransactionBody(@NonNull final TransactionBody txBody) throws P checkTimeBox( txBody.transactionID().transactionValidStart(), - txBody.transactionValidDurationOrElse(Duration.DEFAULT)); + txBody.transactionValidDurationOrElse(Duration.DEFAULT), + config.transactionMinValidDuration(), + config.transactionMaxValidDuration(), + config.transactionMinValidityBufferSecs()); } /** @@ -364,12 +370,12 @@ private void checkTransactionID(@Nullable final TransactionID txnId) throws PreC * @param memo The memo to check. * @throws PreCheckException if the memo is too long, or otherwise fails the check. */ - private void checkMemo(@Nullable final String memo) throws PreCheckException { + private void checkMemo(@Nullable final String memo, final int maxMemoUtf8Bytes) throws PreCheckException { if (memo == null) return; // Nothing to do, a null memo is valid. // Verify the number of bytes does not exceed the maximum allowed. // Note that these bytes are counted in UTF-8. final var buffer = memo.getBytes(StandardCharsets.UTF_8); - if (buffer.length > props.maxMemoUtf8Bytes()) { + if (buffer.length > maxMemoUtf8Bytes) { throw new PreCheckException(MEMO_TOO_LONG); } // FUTURE: This check should be removed after mirror node supports 0x00 in memo fields @@ -390,18 +396,26 @@ private void checkMemo(@Nullable final String memo) throws PreCheckException { * select a duration that is shorter than the network's configuration * for max duration, but cannot exceed it, as long as it is not shorter than the network's * configuration for min duration. + * @param min The minimum duration allowed by the network configuration. 
+ * @param max The maximum duration allowed by the network configuration. * @throws PreCheckException if the transaction duration is invalid, or if the start time is too old, or in the future. */ - private void checkTimeBox(final Timestamp start, final Duration duration) throws PreCheckException { + private void checkTimeBox( + final Timestamp start, + final Duration duration, + final long min, + final long max, + final long minValidityBufferSecs) + throws PreCheckException { // The transaction duration must not be longer than the configured maximum transaction duration // or less than the configured minimum transaction duration. final var validForSecs = duration.seconds(); - if (validForSecs < props.minTxnDuration() || validForSecs > props.maxTxnDuration()) { + if (validForSecs < min || validForSecs > max) { throw new PreCheckException(INVALID_TRANSACTION_DURATION); } final var validStart = toInstant(start); - final var validDuration = toSecondsDuration(validForSecs, validStart); + final var validDuration = toSecondsDuration(validForSecs, validStart, minValidityBufferSecs); final var currentTime = Instant.now(Clock.systemUTC()); if (validStart.plusSeconds(validDuration).isBefore(currentTime)) { throw new PreCheckException(TRANSACTION_EXPIRED); @@ -432,11 +446,11 @@ private Instant toInstant(final Timestamp timestamp) { * * @param validForSecs the duration in seconds * @param validStart the {@link Instant} that is used to calculate the maximum + * @param minValidBufferSecs the minimum buffer in seconds * @return the valid duration given in seconds */ - private long toSecondsDuration(final long validForSecs, final Instant validStart) { - return Math.min( - validForSecs - props.minValidityBuffer(), Instant.MAX.getEpochSecond() - validStart.getEpochSecond()); + private long toSecondsDuration(final long validForSecs, final Instant validStart, final long minValidBufferSecs) { + return Math.min(validForSecs - minValidBufferSecs, Instant.MAX.getEpochSecond() - validStart.getEpochSecond()); } /** A simple utility method replaced in Java 21 with {@code Math.clamp(long, long long)} */ diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java index 1f1807c6bef1..8bd1c2a747d1 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java @@ -21,6 +21,9 @@ import com.hedera.node.app.service.consensus.ConsensusService; import com.hedera.node.app.service.consensus.ReadableTopicStore; import com.hedera.node.app.service.consensus.impl.ReadableTopicStoreImpl; +import com.hedera.node.app.service.file.FileService; +import com.hedera.node.app.service.file.ReadableFileStore; +import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; import com.hedera.node.app.service.networkadmin.FreezeService; import com.hedera.node.app.service.networkadmin.NetworkService; import com.hedera.node.app.service.networkadmin.ReadableRunningHashLeafStore; @@ -58,6 +61,7 @@ ReadableAccountStore.class, new StoreEntry(TokenService.NAME, ReadableAccountSto ReadableTokenStore.class, new StoreEntry(TokenService.NAME, ReadableTokenStoreImpl::new), ReadableTopicStore.class, new StoreEntry(ConsensusService.NAME, ReadableTopicStoreImpl::new), ReadableScheduleStore.class, new StoreEntry(ScheduleService.NAME, 
ReadableScheduleStoreImpl::new), + ReadableFileStore.class, new StoreEntry(FileService.NAME, ReadableFileStoreImpl::new), ReadableUpdateFileStore.class, new StoreEntry(FreezeService.NAME, ReadableUpdateFileStoreImpl::new), ReadableRunningHashLeafStore.class, new StoreEntry(NetworkService.NAME, ReadableRunningHashLeafStoreImpl::new), diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/CompoundSignatureVerificationFuture.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/CompoundSignatureVerificationFuture.java similarity index 99% rename from hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/CompoundSignatureVerificationFuture.java rename to hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/CompoundSignatureVerificationFuture.java index d4a1d6c20cae..8b6ede451c27 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/CompoundSignatureVerificationFuture.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/CompoundSignatureVerificationFuture.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.hedera.node.app.workflows.prehandle; +package com.hedera.node.app.workflows.handle; import static java.util.Objects.requireNonNull; diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java index 3e136137e988..2f48b6b91b71 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java @@ -20,6 +20,7 @@ import static com.hedera.node.app.spi.workflows.HandleContext.TransactionCategory.PRECEDING; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.transaction.TransactionBody; @@ -38,6 +39,8 @@ import com.hedera.node.app.workflows.dispatcher.WritableStoreFactory; import com.hedera.node.app.workflows.handle.stack.Savepoint; import com.hedera.node.app.workflows.handle.stack.SavepointStackImpl; +import com.hedera.node.app.workflows.handle.validation.AttributeValidatorImpl; +import com.hedera.node.app.workflows.handle.validation.ExpiryValidatorImpl; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; @@ -50,6 +53,8 @@ public class HandleContextImpl implements HandleContext { private final TransactionBody txBody; + private final AccountID payer; + private final Key payerKey; private final TransactionCategory category; private final SingleTransactionRecordBuilder recordBuilder; private final SavepointStackImpl stack; @@ -61,11 +66,15 @@ public class HandleContextImpl implements HandleContext { private final WritableStoreFactory writableStoreFactory; private ReadableStoreFactory readableStoreFactory; + private AttributeValidator attributeValidator; + private ExpiryValidator expiryValidator; /** * Constructs a {@link HandleContextImpl}. 
* * @param txBody The {@link TransactionBody} of the transaction + * @param payer The {@link AccountID} of the payer + * @param payerKey The {@link Key} of the payer * @param category The {@link TransactionCategory} of the transaction (either user, preceding, or child) * @param recordBuilder The main {@link SingleTransactionRecordBuilder} * @param stack The {@link SavepointStackImpl} used to manage savepoints @@ -77,6 +86,8 @@ public class HandleContextImpl implements HandleContext { */ public HandleContextImpl( @NonNull final TransactionBody txBody, + @NonNull final AccountID payer, + @NonNull final Key payerKey, @NonNull final TransactionCategory category, @NonNull final SingleTransactionRecordBuilder recordBuilder, @NonNull final SavepointStackImpl stack, @@ -86,6 +97,8 @@ public HandleContextImpl( @NonNull final TransactionDispatcher dispatcher, @NonNull final ServiceScopeLookup serviceScopeLookup) { this.txBody = requireNonNull(txBody, "txBody must not be null"); + this.payer = requireNonNull(payer, "payer must not be null"); + this.payerKey = requireNonNull(payerKey, "payerKey must not be null"); this.category = requireNonNull(category, "category must not be null"); this.recordBuilder = requireNonNull(recordBuilder, "recordBuilder must not be null"); this.stack = requireNonNull(stack, "stack must not be null"); @@ -115,6 +128,18 @@ public TransactionBody body() { return txBody; } + @NonNull + @Override + public AccountID payer() { + return payer; + } + + @Nullable + @Override + public Key payerKey() { + return payerKey; + } + @Override @NonNull public Configuration configuration() { @@ -129,24 +154,30 @@ public long newEntityNum() { @Override @NonNull public AttributeValidator attributeValidator() { - return current().attributeValidator(); + if (attributeValidator == null) { + attributeValidator = new AttributeValidatorImpl(this); + } + return attributeValidator; } @Override @NonNull public ExpiryValidator expiryValidator() { - return current().expiryValidator(); + if (expiryValidator == null) { + expiryValidator = new ExpiryValidatorImpl(this); + } + return expiryValidator; } @Override - @Nullable + @NonNull public SignatureVerification verificationFor(@NonNull final Key key) { requireNonNull(key, "key must not be null"); return verifier.verificationFor(key); } @Override - @Nullable + @NonNull public SignatureVerification verificationFor(@NonNull final Bytes evmAlias) { requireNonNull(evmAlias, "evmAlias must not be null"); return verifier.verificationFor(evmAlias); @@ -264,6 +295,8 @@ private void dispatch( final var childStack = new SavepointStackImpl(current().state(), configuration()); final var childContext = new HandleContextImpl( txBody, + payer, + payerKey, childCategory, childRecordBuilder, childStack, diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextVerifier.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextVerifier.java index c19ab9fba61f..29de4f3abd1c 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextVerifier.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextVerifier.java @@ -16,62 +16,166 @@ package com.hedera.node.app.workflows.handle; +import static com.hedera.node.app.spi.signatures.SignatureVerification.failedVerification; +import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; +import static 
java.util.concurrent.CompletableFuture.completedFuture; import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.KeyList; +import com.hedera.node.app.signature.SignatureVerificationFuture; import com.hedera.node.app.spi.signatures.SignatureVerification; +import com.hedera.node.config.data.HederaConfig; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Supplier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class that contains all functionality for verifying signatures during handle. */ public class HandleContextVerifier { - private final Map keyVerifications; + private static final Logger logger = LoggerFactory.getLogger(HandleContextVerifier.class); + + private final long timeout; + private final Map keyVerifications; /** * Creates a {@link HandleContextVerifier} * - * @param keyVerifications A {@link Map} with all data for required signatures + * @param keyVerifications A {@link Map} with all data to verify signatures */ - public HandleContextVerifier(@NonNull final Map keyVerifications) { + public HandleContextVerifier( + @NonNull final HederaConfig config, @NonNull final Map keyVerifications) { + this.timeout = requireNonNull(config, "config must not be null").workflowVerificationTimeoutMS(); this.keyVerifications = requireNonNull(keyVerifications, "keyVerifications must not be null"); } /** - * Gets the {@link SignatureVerification} for a {@link Key} + * Get a {@link SignatureVerification} for the given key. + * + *
<p>If the key is a cryptographic key (i.e. a basic key like ED25519 or ECDSA_SECP256K1), and the cryptographic + * key was in the signature map of the transaction, then a {@link SignatureVerification} will be returned for that key. + * If there was no such cryptographic key in the signature map, a failed {@link SignatureVerification} is returned. + * + *
<p>If the key is a key list, then a {@link SignatureVerification} will be returned that aggregates the results + * of each key in the key list, possibly nested. * - * @param key The {@link Key} to get the {@link SignatureVerification} for - * @return The {@link SignatureVerification} if found, otherwise {@code null} - * @throws NullPointerException If {@code key} is {@code null} + *
<p>If the key is a threshold key, then a {@link SignatureVerification} will be returned that aggregates the + * results of each key in the threshold key, possibly nested, based on the threshold for that key. + * + * @param key The key to check on the verification results for. + * @return A {@link SignatureVerification} for the given key if available, or a failed verification otherwise. */ - @Nullable + @NonNull public SignatureVerification verificationFor(@NonNull final Key key) { requireNonNull(key, "key must not be null"); - return keyVerifications.get(key); + // FUTURE: Cache the results of this method, if it is usually called several times + return resolveFuture(verificationFutureFor(key), () -> failedVerification(key)); } /** - * Gets the {@link SignatureVerification} for a hollow account - * - * @param evmAlias The evm-alias of the hollow account - * @return The {@link SignatureVerification} if found, otherwise {@code null} - * @throws NullPointerException If {@code evmAlias} is {@code null} + * Look for a {@link SignatureVerification} that applies to the given hollow account. + * @param evmAlias The evm alias to look up verification for. + * @return The {@link SignatureVerification} for the given hollow account, or a failed verification if none applies. */ - @Nullable + @NonNull public SignatureVerification verificationFor(@NonNull final Bytes evmAlias) { requireNonNull(evmAlias, "evmAlias must not be null"); + // FUTURE: Cache the results of this method, if it is usually called several times if (evmAlias.length() == 20) { for (final var result : keyVerifications.values()) { final var account = result.evmAlias(); if (account != null && evmAlias.matchesPrefix(account)) { - return result; + return resolveFuture(result, () -> failedVerification(evmAlias)); } } } - return null; + return failedVerification(evmAlias); + } + + /** + * Get a {@link Future} for the given key. + * + *
<p>If the key is a cryptographic key (i.e. a basic key like ED25519 or ECDSA_SECP256K1), and the cryptographic + * key was in the signature map of the transaction, then a {@link Future} will be returned that will yield the + * {@link SignatureVerification} for that key. If there was no such cryptographic key in the signature map, then + * a completed, failed future is returned. + * + *
<p>If the key is a key list, then a {@link Future} will be returned that aggregates the results of each key in + * the key list, possibly nested. + * + *
<p>If the key is a threshold key, then a {@link Future} will be returned that aggregates the results of each key + * in the threshold key, possibly nested, based on the threshold for that key. + * + * @param key The key to check on the verification results for. + * @return A {@link Future} that will yield the {@link SignatureVerification} for the given key. + */ + @NonNull + private Future<SignatureVerification> verificationFutureFor(@NonNull final Key key) { + return switch (key.key().kind()) { + case ED25519, ECDSA_SECP256K1 -> { + final var result = keyVerifications.get(key); + yield result == null ? completedFuture(failedVerification(key)) : result; + } + case KEY_LIST -> { + final var keys = key.keyListOrThrow().keysOrElse(emptyList()); + yield verificationFutureFor(key, keys, 0); + } + case THRESHOLD_KEY -> { + final var thresholdKey = key.thresholdKeyOrThrow(); + final var keyList = thresholdKey.keysOrElse(KeyList.DEFAULT); + final var keys = keyList.keysOrElse(emptyList()); + final var threshold = thresholdKey.threshold(); + final var clampedThreshold = Math.min(Math.max(1, threshold), keys.size()); + yield verificationFutureFor(key, keys, keys.size() - clampedThreshold); + } + case CONTRACT_ID, DELEGATABLE_CONTRACT_ID, ECDSA_384, RSA_3072, UNSET -> completedFuture( + failedVerification(key)); + }; + } + + /** + * Utility method that converts the keys into a list of {@link Future} and then aggregates + * them into a single {@link Future}. + * + * @param key The key that is being verified. + * @param keys The sub-keys of the key being verified + * @param numCanFail The number of sub-keys that can fail verification before the key itself does + * @return A {@link Future} + */ + @NonNull + private Future<SignatureVerification> verificationFutureFor( + @NonNull final Key key, @NonNull final List<Key> keys, final int numCanFail) { + // If there are no keys, then we always fail. There must be at least one key in a key list or threshold key + // for it to be a valid key and to pass any form of verification. 
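// Illustrative worked example (values assumed for illustration, not taken from this change) of the
// threshold-to-numCanFail arithmetic implemented in verificationFutureFor above, for a 2-of-3 threshold key:
//
//     threshold = 2, keys.size() = 3
//     clampedThreshold = Math.min(Math.max(1, 2), 3) = 2
//     numCanFail       = keys.size() - clampedThreshold = 3 - 2 = 1
//
// so the resulting CompoundSignatureVerificationFuture still reports success when at most one child
// verification fails, while a plain key list is built with numCanFail = 0 (every child must pass).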
+ if (keys.isEmpty() || numCanFail < 0) return completedFuture(failedVerification(key)); + final var futures = keys.stream().map(this::verificationFutureFor).toList(); + return new CompoundSignatureVerificationFuture(key, null, futures, numCanFail); + } + + @NonNull + private SignatureVerification resolveFuture( + @NonNull final Future future, + @NonNull final Supplier fallback) { + try { + return future.get(timeout, TimeUnit.MILLISECONDS); + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + logger.error("Interrupted while waiting for signature verification", e); + } catch (final TimeoutException e) { + logger.warn("Timed out while waiting for signature verification, probably going to ISS soon", e); + } catch (final ExecutionException e) { + logger.error("An unexpected exception was thrown while waiting for SignatureVerification", e); + } + return fallback.get(); } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java index 4a2928c67654..d8e54fb80714 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java @@ -30,14 +30,12 @@ import com.hedera.node.app.signature.SignatureExpander; import com.hedera.node.app.signature.SignatureVerificationFuture; import com.hedera.node.app.signature.SignatureVerifier; -import com.hedera.node.app.spi.info.NodeInfo; -import com.hedera.node.app.spi.signatures.SignatureVerification; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.workflows.HandleContext.TransactionCategory; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.state.HederaState; import com.hedera.node.app.workflows.TransactionChecker; -import com.hedera.node.app.workflows.TransactionInfo; import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; import com.hedera.node.app.workflows.dispatcher.TransactionDispatcher; import com.hedera.node.app.workflows.handle.stack.SavepointStackImpl; @@ -46,6 +44,8 @@ import com.hedera.node.app.workflows.prehandle.PreHandleResult.Status; import com.hedera.node.app.workflows.prehandle.PreHandleWorkflow; import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.VersionedConfiguration; +import com.hedera.node.config.data.HederaConfig; import com.swirlds.common.system.Round; import com.swirlds.common.system.events.ConsensusEvent; import com.swirlds.common.system.transaction.ConsensusTransaction; @@ -53,9 +53,11 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Instant; +import java.time.InstantSource; import java.util.HashMap; import java.util.HashSet; -import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import javax.inject.Inject; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -65,9 +67,9 @@ */ public class HandleWorkflow { - private static final Logger LOG = LogManager.getLogger(HandleWorkflow.class); + private static final Logger logger = LogManager.getLogger(HandleWorkflow.class); - private final NodeInfo nodeInfo; + private final NetworkInfo networkInfo; private final PreHandleWorkflow preHandleWorkflow; private final 
TransactionDispatcher dispatcher; private final RecordManager recordManager; @@ -76,10 +78,11 @@ public class HandleWorkflow { private final TransactionChecker checker; private final ServiceScopeLookup serviceScopeLookup; private final ConfigProvider configProvider; + private final InstantSource instantSource; @Inject public HandleWorkflow( - @NonNull final NodeInfo nodeInfo, + @NonNull final NetworkInfo networkInfo, @NonNull final PreHandleWorkflow preHandleWorkflow, @NonNull final TransactionDispatcher dispatcher, @NonNull final RecordManager recordManager, @@ -87,8 +90,9 @@ public HandleWorkflow( @NonNull final SignatureVerifier signatureVerifier, @NonNull final TransactionChecker checker, @NonNull final ServiceScopeLookup serviceScopeLookup, - @NonNull final ConfigProvider configProvider) { - this.nodeInfo = requireNonNull(nodeInfo, "nodeInfo must not be null"); + @NonNull final ConfigProvider configProvider, + @NonNull final InstantSource instantSource) { + this.networkInfo = requireNonNull(networkInfo, "networkInfo must not be null"); this.preHandleWorkflow = requireNonNull(preHandleWorkflow, "preHandleWorkflow must not be null"); this.dispatcher = requireNonNull(dispatcher, "dispatcher must not be null"); this.recordManager = requireNonNull(recordManager, "recordManager must not be null"); @@ -97,6 +101,7 @@ public HandleWorkflow( this.checker = requireNonNull(checker, "checker must not be null"); this.serviceScopeLookup = requireNonNull(serviceScopeLookup, "serviceScopeLookup must not be null"); this.configProvider = requireNonNull(configProvider, "configProvider must not be null"); + this.instantSource = requireNonNull(instantSource, "instantSource must not be null"); } /** @@ -130,28 +135,40 @@ private void handlePlatformTransaction( try { // Setup configuration var configuration = configProvider.getConfiguration(); + final var hederaConfig = configuration.getConfigData(HederaConfig.class); - final var verifications = getVerifications(state, platformEvent, platformTxn, configuration); + final var preHandleResult = getCurrentPreHandleResult(state, platformEvent, platformTxn, configuration); recordBuilder.transaction( - verifications.txInfo().transaction(), verifications.txInfo().signedBytes()); - - // Read all signature verifications. This will also wait, if validation is still ongoing. - final var keyVerifications = new HashMap(); - for (final var entry : verifications.keyVerifications().entrySet()) { - // TODO: Implement timeout - final var verification = entry.getValue().get(); - if (verification.failed()) { + preHandleResult.txInfo().transaction(), + preHandleResult.txInfo().signedBytes()); + + // Check all signature verifications. This will also wait, if validation is still ongoing. 
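// Illustrative sketch (timeout value assumed for illustration, not taken from this change) of how the
// loop below budgets the configured workflowVerificationTimeoutMS across all required keys rather than
// granting each key the full timeout. With a timeout of 20_000 ms:
//
//     maxMillis = instantSource.millis() + 20_000
//     the payer key verification may wait up to the full 20_000 ms
//     every further key only waits maxMillis - instantSource.millis(), i.e. whatever budget remains
//
// so the total time spent waiting on signature verifications stays bounded by the configured timeout,
// and an exhausted budget surfaces as the TimeoutException handled further down.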
+ final var timeout = hederaConfig.workflowVerificationTimeoutMS(); + final var maxMillis = instantSource.millis() + timeout; + final var payerKeyVerification = + preHandleResult.verificationResults().get(preHandleResult.payerKey()); + if (payerKeyVerification.get(timeout, TimeUnit.MILLISECONDS).failed()) { + throw new HandleException(ResponseCodeEnum.INVALID_SIGNATURE); + } + for (final var key : preHandleResult.requiredKeys()) { + final var remainingMillis = maxMillis - instantSource.millis(); + if (remainingMillis <= 0) { + throw new TimeoutException("Verification of signatures timed out"); + } + final var verification = preHandleResult.verificationResults().get(key); + if (verification.get(remainingMillis, TimeUnit.MILLISECONDS).failed()) { throw new HandleException(ResponseCodeEnum.INVALID_SIGNATURE); } - keyVerifications.put(entry.getKey(), verification); } // Setup context - final var txBody = verifications.txInfo().txBody(); + final var txBody = preHandleResult.txInfo().txBody(); final var stack = new SavepointStackImpl(state, configuration); - final var verifier = new HandleContextVerifier(keyVerifications); + final var verifier = new HandleContextVerifier(hederaConfig, preHandleResult.verificationResults()); final var context = new HandleContextImpl( txBody, + preHandleResult.payer(), + preHandleResult.payerKey(), TransactionCategory.USER, recordBuilder, stack, @@ -168,16 +185,19 @@ private void handlePlatformTransaction( // commit state stack.commit(); - } catch (PreCheckException e) { + } catch (final PreCheckException e) { recordFailedTransaction(e.responseCode(), recordBuilder, recordListBuilder); - } catch (HandleException e) { + } catch (final HandleException e) { recordFailedTransaction(e.getStatus(), recordBuilder, recordListBuilder); - } catch (InterruptedException e) { - LOG.error("Interrupted while waiting for signature verification", e); + } catch (final InterruptedException e) { + logger.error("Interrupted while waiting for signature verification", e); Thread.currentThread().interrupt(); recordBuilder.status(ResponseCodeEnum.UNKNOWN); - } catch (Throwable e) { - LOG.error("An unexpected exception was thrown during handle", e); + } catch (final TimeoutException e) { + logger.warn("Timed out while waiting for signature verification, probably going to ISS soon", e); + recordBuilder.status(ResponseCodeEnum.UNKNOWN); + } catch (final Throwable e) { + logger.error("An unexpected exception was thrown during handle", e); recordBuilder.status(ResponseCodeEnum.UNKNOWN); } @@ -207,27 +227,28 @@ private void recordFailedTransaction( * its verification data. */ @NonNull - private VerificationResult getVerifications( + private PreHandleResult getCurrentPreHandleResult( @NonNull final HederaState state, @NonNull final ConsensusEvent platformEvent, @NonNull final ConsensusTransaction platformTxn, - @NonNull final Configuration configuration) + @NonNull final VersionedConfiguration configuration) throws PreCheckException { final var metadata = platformTxn.getMetadata(); // We do not know how long transactions are kept in memory. Clearing metadata to avoid keeping it for too long. 
platformTxn.setMetadata(null); - // First check if pre-handle was run successfully and all configuration has not changed - if (preHandleStillValid(metadata)) { - final var previousResult = (PreHandleResult) metadata; + // First check if pre-handle was run before (in which case metadata is a PreHandleResult) + if (preHandleStillValid(configuration, metadata)) { + final var preHandleResult = (PreHandleResult) metadata; + // In case of due diligence error, we prepare a CryptoTransfer to charge the node and return immediately. - if (previousResult.status() == Status.NODE_DUE_DILIGENCE_FAILURE) { + if (preHandleResult.status() == Status.NODE_DUE_DILIGENCE_FAILURE) { return createPenaltyPayment(); } // If pre-handle was successful, we need to add signatures that were not known at the time of pre-handle. - if (previousResult.status() == Status.SO_FAR_SO_GOOD) { - return addMissingSignatures(state, previousResult, configuration); + if (preHandleResult.status() == Status.SO_FAR_SO_GOOD) { + return addMissingSignatures(state, preHandleResult, configuration); } } @@ -235,12 +256,13 @@ private VerificationResult getVerifications( // Therefore, we simply rerun pre-handle. final var storeFactory = new ReadableStoreFactory(state); final var accountStore = storeFactory.getStore(ReadableAccountStore.class); - final var creator = nodeInfo.accountOf(platformEvent.getCreatorId().id()); - final var result = preHandleWorkflow.preHandleTransaction(creator, storeFactory, accountStore, platformTxn); + final var creator = networkInfo.nodeInfo(platformEvent.getCreatorId().id()); + final var creatorId = creator == null ? null : creator.accountId(); + final var result = preHandleWorkflow.preHandleTransaction(creatorId, storeFactory, accountStore, platformTxn); // If pre-handle was successful, we return the result. Otherwise, we charge the node or throw an exception. return switch (result.status()) { - case SO_FAR_SO_GOOD -> new VerificationResult(result); + case SO_FAR_SO_GOOD -> result; case NODE_DUE_DILIGENCE_FAILURE -> createPenaltyPayment(); case UNKNOWN_FAILURE -> throw new IllegalStateException("Pre-handle failed with unknown failure"); default -> throw new PreCheckException(result.responseCode()); @@ -248,14 +270,17 @@ private VerificationResult getVerifications( } @NonNull - private VerificationResult createPenaltyPayment() { + private PreHandleResult createPenaltyPayment() { // TODO: Implement createPenaltyPayment() - https://github.com/hashgraph/hedera-services/issues/6811 throw new UnsupportedOperationException("Not implemented yet"); } - private boolean preHandleStillValid(@Nullable final Object metadata) { - // TODO: Check config (https://github.com/hashgraph/hedera-services/issues/6812) - return metadata instanceof PreHandleResult; + private boolean preHandleStillValid( + @NonNull final VersionedConfiguration configuration, @Nullable final Object metadata) { + if (metadata instanceof PreHandleResult preHandleResult) { + return preHandleResult.configVersion() == configuration.getVersion(); + } + return false; } /* @@ -264,54 +289,55 @@ private boolean preHandleStillValid(@Nullable final Object metadata) { * results. 
*/ @NonNull - private VerificationResult addMissingSignatures( + private PreHandleResult addMissingSignatures( @NonNull final HederaState state, @NonNull final PreHandleResult previousResult, @NonNull final Configuration configuration) throws PreCheckException { - final var txBody = previousResult.txInfo().txBody(); + final var txInfo = previousResult.txInfo(); + final var txBody = txInfo.txBody(); + final var sigPairs = txInfo.signatureMap().sigPairOrElse(emptyList()); + final var signedBytes = txInfo.signedBytes(); // extract keys and hollow accounts again final var storeFactory = new ReadableStoreFactory(state); final var context = new PreHandleContextImpl(storeFactory, txBody, configuration); dispatcher.dispatchPreHandle(context); - // setup result with payer key (which has always been verified during preHandle) - final var previousVerifications = previousResult.verificationResults(); - final var newVerifications = new HashMap(); - newVerifications.put(previousResult.payerKey(), previousVerifications.get(previousResult.payerKey())); + // prepare signature verification + final var verifications = new HashMap(); + final var payerKey = previousResult.payerKey(); + verifications.put(payerKey, previousResult.verificationResults().get(payerKey)); - // add non-payer keys - final var originals = previousResult.txInfo().signatureMap().sigPairOrElse(emptyList()); + // expand all keys final var expanded = new HashSet(); - final var nonPayerKeys = context.requiredNonPayerKeys(); - for (final var key : nonPayerKeys) { - final var found = previousVerifications.get(key); - if (found != null) { - newVerifications.put(key, found); - } else { - signatureExpander.expand(key, originals, expanded); + signatureExpander.expand(context.requiredNonPayerKeys(), sigPairs, expanded); + signatureExpander.expand(context.optionalNonPayerKeys(), sigPairs, expanded); + + // remove all keys that were already verified + for (final var it = expanded.iterator(); it.hasNext(); ) { + final var entry = it.next(); + final var oldVerification = previousResult.verificationResults().get(entry.key()); + if (oldVerification != null) { + verifications.put(oldVerification.key(), oldVerification); + it.remove(); } } - // start verification of any key that was not found in the previous result + // start verification for remaining keys if (!expanded.isEmpty()) { - newVerifications.putAll( - signatureVerifier.verify(previousResult.txInfo().signedBytes(), expanded)); + verifications.putAll(signatureVerifier.verify(signedBytes, expanded)); } - return new VerificationResult(previousResult.txInfo(), newVerifications); - } - - /** - * A small data structure to hold the verification data of a transaction - */ - private record VerificationResult( - @NonNull TransactionInfo txInfo, @NonNull Map keyVerifications) { - - @SuppressWarnings("DataFlowIssue") - public VerificationResult(PreHandleResult result) { - this(result.txInfo(), result.verificationResults()); - } + return new PreHandleResult( + previousResult.payer(), + payerKey, + previousResult.status(), + previousResult.responseCode(), + previousResult.txInfo(), + context.requiredNonPayerKeys(), + verifications, + previousResult.innerResult(), + previousResult.configVersion()); } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/Savepoint.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/Savepoint.java index 3530d5683cb8..261c324e5622 100644 --- 
a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/Savepoint.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/Savepoint.java @@ -18,11 +18,7 @@ import static java.util.Objects.requireNonNull; -import com.hedera.node.app.spi.validation.AttributeValidator; -import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.state.WrappedHederaState; -import com.hedera.node.app.workflows.handle.validation.AttributeValidatorImpl; -import com.hedera.node.app.workflows.handle.validation.ExpiryValidatorImpl; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; @@ -91,28 +87,6 @@ void configuration(@NonNull final Configuration configuration) { */ public long newEntityNum() { // TODO: Implement Savepoint.newEntityNum (https://github.com/hashgraph/hedera-services/issues/6701) - return 1L; - } - - /** - * Returns an {@link AttributeValidator} that is based on the current configuration and state. - * - * @return an {@link AttributeValidator} - */ - @NonNull - public AttributeValidator attributeValidator() { - // TODO: Implement Savepoint.attributeValidator (https://github.com/hashgraph/hedera-services/issues/6701) - return new AttributeValidatorImpl(); - } - - /** - * Returns an {@link ExpiryValidator} that is based on the current configuration and state. - * - * @return an {@link ExpiryValidator} - */ - @NonNull - public ExpiryValidator expiryValidator() { - // TODO: Implement Savepoint.expiryValidator (https://github.com/hashgraph/hedera-services/issues/6701) - return new ExpiryValidatorImpl(); + return 1000L; } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/AttributeValidatorImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/AttributeValidatorImpl.java index 7bc129d3dc9a..002f1efb6d4e 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/AttributeValidatorImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/AttributeValidatorImpl.java @@ -16,9 +16,26 @@ package com.hedera.node.app.workflows.handle.validation; +import static com.hedera.hapi.node.base.ResponseCodeEnum.AUTORENEW_DURATION_NOT_IN_RANGE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.BAD_ENCODING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_EXPIRATION_TIME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ZERO_BYTE_IN_STRING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MEMO_TOO_LONG; +import static com.hedera.node.app.spi.key.KeyUtils.isValid; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static java.util.Collections.emptyList; +import static java.util.Objects.requireNonNull; + import com.hedera.hapi.node.base.Key; import com.hedera.node.app.spi.validation.AttributeValidator; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.data.EntitiesConfig; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.data.LedgerConfig; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.charset.StandardCharsets; /** * Implementation of {@link AttributeValidator}. 
@@ -27,29 +44,108 @@ * GitHub Issue (#6701) */ public class AttributeValidatorImpl implements AttributeValidator { + + private final HandleContext context; + + public AttributeValidatorImpl(@NonNull final HandleContext context) { + this.context = requireNonNull(context, "context must not be null"); + } + + /** + * {@inheritDoc} + */ @Override - public void validateKey(Key key) { - // TODO: Implement validateKey + public void validateKey(@NonNull final Key key) { + validateKeyAtLevel(key, 1); + + // If key is mappable in all levels, validate the key is valid + if (!isValid(key)) { + throw new HandleException(BAD_ENCODING); + } } + /** + * {@inheritDoc} + */ @Override - public void validateMemo(String memo) { - // TODO: Implement validateMemo + public void validateMemo(@Nullable final String memo) { + if (memo == null) { + return; + } + final var raw = memo.getBytes(StandardCharsets.UTF_8); + final var maxMemoUtf8Bytes = + context.configuration().getConfigData(HederaConfig.class).transactionMaxMemoUtf8Bytes(); + if (raw.length > maxMemoUtf8Bytes) { + throw new HandleException(MEMO_TOO_LONG); + } else if (containsZeroByte(raw)) { + throw new HandleException(INVALID_ZERO_BYTE_IN_STRING); + } } + /** + * {@inheritDoc} + */ @Override public void validateExpiry(long expiry) { - // TODO: Implement validateExpiry + final var maxEntityLifetime = + context.configuration().getConfigData(EntitiesConfig.class).maxLifetime(); + final var now = context.consensusNow().getEpochSecond(); + final var expiryGivenMaxLifetime = now + maxEntityLifetime; + validateTrue(expiry > now && expiry <= expiryGivenMaxLifetime, INVALID_EXPIRATION_TIME); } + /** + * {@inheritDoc} + */ @Override public void validateAutoRenewPeriod(long autoRenewPeriod) { - // TODO: Implement validateAutoRenewPeriod + final var ledgerConfig = context.configuration().getConfigData(LedgerConfig.class); + validateTrue( + autoRenewPeriod >= ledgerConfig.autoRenewPeriodMinDuration() + && autoRenewPeriod <= ledgerConfig.autoRenewPeriodMaxDuration(), + AUTORENEW_DURATION_NOT_IN_RANGE); } - @Override - public boolean isImmutableKey(@NonNull Key key) { - // TODO: Implement isImmutableKey + private void validateKeyAtLevel(@NonNull final Key key, final int level) { + if (level > MAX_NESTED_KEY_LEVELS) { + throw new HandleException(BAD_ENCODING); + } + if (!key.hasThresholdKey() && !key.hasKeyList()) { + validateSimple(key); + } else if (key.hasThresholdKey() + && key.thresholdKeyOrThrow().hasKeys() + && key.thresholdKeyOrThrow().keysOrThrow().hasKeys()) { + key.thresholdKeyOrThrow().keysOrThrow().keysOrThrow().forEach(k -> validateKeyAtLevel(k, level + 1)); + } else if (key.keyListOrThrow().hasKeys()) { + key.keyListOrThrow().keysOrThrow().forEach(k -> validateKeyAtLevel(k, level + 1)); + } + } + + /** + * Current behavior is to only invalidate a simple key structure if it has no explicit type. Other validations, + * like on the number of bytes in the public key; or on the size of the threshold key; are done elsewhere. 
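// Illustrative note (limit value assumed for illustration, not taken from this change): validateMemo
// above limits memos by UTF-8 byte count, not by character count, so multi-byte characters consume the
// budget faster. Assuming transactionMaxMemoUtf8Bytes = 100:
//
//     "a".repeat(100).getBytes(StandardCharsets.UTF_8).length == 100   // accepted
//     "é".repeat(60).getBytes(StandardCharsets.UTF_8).length  == 120   // rejected with MEMO_TOO_LONG
//
// and any memo containing a 0x00 byte is rejected with INVALID_ZERO_BYTE_IN_STRING.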
+ * + * @param key the key to validate + */ + private void validateSimple(@NonNull final Key key) { + if (key.key().kind() == Key.KeyOneOfType.UNSET) { + throw new HandleException(BAD_ENCODING); + } + } + + private static boolean containsZeroByte(@NonNull final byte[] bytes) { + for (final byte b : bytes) { + if (b == 0) { + return true; + } + } return false; } + + @Override + public boolean isImmutableKey(@NonNull final Key key) { + requireNonNull(key); + return key.hasKeyList() + && requireNonNull(key.keyList()).keysOrElse(emptyList()).isEmpty(); + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/ExpiryValidatorImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/ExpiryValidatorImpl.java index 11280a158456..361113dd26a2 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/ExpiryValidatorImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/ExpiryValidatorImpl.java @@ -16,10 +16,27 @@ package com.hedera.node.app.workflows.handle.validation; +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.CONTRACT_EXPIRED_AND_PENDING_REMOVAL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.EXPIRATION_REDUCTION_NOT_ALLOWED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; +import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.node.app.service.evm.exceptions.InvalidTransactionException; +import com.hedera.node.app.service.mono.pbj.PbjConverter; +import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.validation.EntityType; import com.hedera.node.app.spi.validation.ExpiryMeta; import com.hedera.node.app.spi.validation.ExpiryValidator; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.data.AutoRenewConfig; +import com.hedera.node.config.data.HederaConfig; import edu.umd.cs.findbugs.annotations.NonNull; /** @@ -30,23 +47,146 @@ */ public class ExpiryValidatorImpl implements ExpiryValidator { + private final HandleContext context; + + public ExpiryValidatorImpl(@NonNull final HandleContext context) { + this.context = requireNonNull(context, "context must not be null"); + } + + /** + * {@inheritDoc} + */ + @NonNull @Override - public ExpiryMeta resolveCreationAttempt(boolean entityCanSelfFundRenewal, ExpiryMeta creationMetadata) { - // TODO: Implement resolveCreationAttempt - return creationMetadata; + public ExpiryMeta resolveCreationAttempt( + final boolean entityCanSelfFundRenewal, @NonNull final ExpiryMeta creationMeta) { + if (creationMeta.hasAutoRenewNum()) { + validateAutoRenewAccount( + creationMeta.autoRenewShard(), creationMeta.autoRenewRealm(), creationMeta.autoRenewNum()); + } + + long effectiveExpiry = creationMeta.expiry(); + // We prioritize the expiry implied by auto-renew configuration, if it is present + // and complete (meaning either both auto-renew period and auto-renew account are + // present; or 
auto-renew period is present, and the entity can self-fund) + if (hasCompleteAutoRenewSpec(entityCanSelfFundRenewal, creationMeta)) { + effectiveExpiry = context.consensusNow().getEpochSecond() + creationMeta.autoRenewPeriod(); + } + context.attributeValidator().validateExpiry(effectiveExpiry); + + // Even if the effective expiry is valid, we still also require any explicit auto-renew period to be valid + if (creationMeta.hasAutoRenewPeriod()) { + context.attributeValidator().validateAutoRenewPeriod(creationMeta.autoRenewPeriod()); + } + return new ExpiryMeta(effectiveExpiry, creationMeta.autoRenewPeriod(), creationMeta.autoRenewNum()); } + /** + * {@inheritDoc} + */ + @NonNull @Override - public ExpiryMeta resolveUpdateAttempt(ExpiryMeta currentMetadata, ExpiryMeta updateMetadata) { - // TODO: Implement resolveUpdateAttempt - return updateMetadata; + public ExpiryMeta resolveUpdateAttempt( + @NonNull final ExpiryMeta currentMeta, @NonNull final ExpiryMeta updateMeta) { + if (updateMeta.hasAutoRenewNum()) { + validateAutoRenewAccount( + updateMeta.autoRenewShard(), updateMeta.autoRenewRealm(), updateMeta.autoRenewNum()); + } + + var resolvedExpiry = currentMeta.expiry(); + if (updateMeta.hasExplicitExpiry()) { + validateFalse(updateMeta.expiry() < currentMeta.expiry(), EXPIRATION_REDUCTION_NOT_ALLOWED); + context.attributeValidator().validateExpiry(updateMeta.expiry()); + resolvedExpiry = updateMeta.expiry(); + } + + var resolvedAutoRenewPeriod = currentMeta.autoRenewPeriod(); + if (updateMeta.hasAutoRenewPeriod()) { + context.attributeValidator().validateAutoRenewPeriod(updateMeta.autoRenewPeriod()); + resolvedAutoRenewPeriod = updateMeta.autoRenewPeriod(); + } + + var resolvedAutoRenewNum = currentMeta.autoRenewNum(); + if (updateMeta.hasAutoRenewNum()) { + // If just now adding an auto-renew account, confirm the resolved auto-renew period is valid + if (!currentMeta.hasAutoRenewNum()) { + context.attributeValidator().validateAutoRenewPeriod(resolvedAutoRenewPeriod); + } + resolvedAutoRenewNum = updateMeta.autoRenewNum(); + } + return new ExpiryMeta(resolvedExpiry, resolvedAutoRenewPeriod, resolvedAutoRenewNum); } + /** + * {@inheritDoc} + */ @NonNull @Override public ResponseCodeEnum expirationStatus( - @NonNull EntityType entityType, boolean isMarkedExpired, long balanceAvailableForSelfRenewal) { - // TODO: Implement expirationStatus - return ResponseCodeEnum.OK; + @NonNull final EntityType entityType, + final boolean isMarkedExpired, + final long balanceAvailableForSelfRenewal) { + final var isSmartContract = entityType.equals(EntityType.CONTRACT); + final var autoRenewConfig = context.configuration().getConfigData(AutoRenewConfig.class); + if (!autoRenewConfig.isAutoRenewEnabled() + || balanceAvailableForSelfRenewal > 0 + || !isMarkedExpired + || isExpiryDisabled( + isSmartContract, autoRenewConfig.expireAccounts(), autoRenewConfig.expireContracts())) { + return OK; + } + + return isSmartContract ? CONTRACT_EXPIRED_AND_PENDING_REMOVAL : ACCOUNT_EXPIRED_AND_PENDING_REMOVAL; + } + + /** + * Helper to check if an entity with the given metadata has a completely specified + * auto-renew configuration. This is true if either the {@link ExpiryMeta} includes + * both an auto-renew period and an auto-renew account; or if the {@link ExpiryMeta} + * includes only an auto-renew period, and the entity can self-fund its auto-renewal. 
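// Illustrative worked example (values assumed for illustration, not taken from this change) of the
// expiry resolution described above: with consensusNow() at second 1_000_000 and creation metadata that
// carries autoRenewPeriod = 7_776_000 s plus an auto-renew account (a complete auto-renew spec),
//
//     effectiveExpiry = 1_000_000 + 7_776_000 = 8_776_000
//
// which must then pass attributeValidator().validateExpiry(effectiveExpiry), and the explicit
// autoRenewPeriod is additionally range-checked by validateAutoRenewPeriod.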
+ * + * @param entityCanSelfFundRenewal whether the entity can self-fund its auto-renewal + * @param creationMetadata the entity's proposed {@link ExpiryMeta} + * @return whether the entity has a complete auto-renew configuration + */ + private boolean hasCompleteAutoRenewSpec( + final boolean entityCanSelfFundRenewal, final ExpiryMeta creationMetadata) { + return creationMetadata.hasFullAutoRenewSpec() + || (!creationMetadata.hasExplicitExpiry() && entityCanSelfFundRenewal); + } + + /** + * Helper to validate that the given account number is a valid auto-renew account. + * + * @param shard the account shard to validate + * @param realm the account realm to validate + * @param num the account number to validate + * @throws HandleException if the account number is invalid + */ + private void validateAutoRenewAccount(final long shard, final long realm, final long num) { + final var hederaConfig = context.configuration().getConfigData(HederaConfig.class); + validateTrue(shard == hederaConfig.shard() && realm == hederaConfig.realm(), INVALID_AUTORENEW_ACCOUNT); + if (num == 0L) { + // 0L is a sentinel number that says to remove the current auto-renew account + return; + } + final var autoRenewId = AccountID.newBuilder() + .shardNum(shard) + .realmNum(realm) + .accountNum(num) + .build(); + final var accountStore = context.readableStore(ReadableAccountStore.class); + try { + final var account = accountStore.getAccountById(autoRenewId); + if (account == null) { + throw new HandleException(INVALID_AUTORENEW_ACCOUNT); + } + } catch (final InvalidTransactionException e) { + throw new HandleException(PbjConverter.toPbj(e.getResponseCode())); + } + } + + private boolean isExpiryDisabled(boolean smartContract, boolean expireAccounts, boolean expireContracts) { + return (smartContract && !expireContracts) || (!smartContract && !expireAccounts); } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedAttributeValidator.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedAttributeValidator.java index 881ff947db1c..04ff11123a03 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedAttributeValidator.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedAttributeValidator.java @@ -34,6 +34,7 @@ import com.hedera.node.app.spi.validation.AttributeValidator; import com.hedera.node.app.spi.workflows.HandleException; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.charset.StandardCharsets; import java.util.function.LongSupplier; import javax.inject.Inject; @@ -42,7 +43,10 @@ /** * An implementation of {@link AttributeValidator} that encapsulates the current policies for * validating attributes of entities, without any use of {@code mono-service} code. + * + * @deprecated Use {@link AttributeValidatorImpl} instead. 
*/ +@Deprecated(forRemoval = true) @Singleton public class StandardizedAttributeValidator implements AttributeValidator { private final long maxEntityLifetime; @@ -97,7 +101,10 @@ public void validateAutoRenewPeriod(long autoRenewPeriod) { * {@inheritDoc} */ @Override - public void validateMemo(@NonNull final String memo) { + public void validateMemo(@Nullable final String memo) { + if (memo == null) { + return; + } final var raw = memo.getBytes(StandardCharsets.UTF_8); if (raw.length > dynamicProperties.maxMemoUtf8Bytes()) { throw new HandleException(MEMO_TOO_LONG); diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedExpiryValidator.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedExpiryValidator.java index 8f0ba8878e16..ed901e1fce3c 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedExpiryValidator.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/validation/StandardizedExpiryValidator.java @@ -44,7 +44,10 @@ * An implementation of {@link ExpiryValidator} that encapsulates the current policies * for validating expiry metadata of create and update transactions, without using * any {@code mono-service} components. + * + * @deprecated Use {@link ExpiryValidatorImpl} instead. */ +@Deprecated(forRemoval = true) public class StandardizedExpiryValidator implements ExpiryValidator { private final Consumer idValidator; private final LongSupplier consensusSecondNow; @@ -69,8 +72,10 @@ public StandardizedExpiryValidator( /** * {@inheritDoc} */ + @NonNull @Override - public ExpiryMeta resolveCreationAttempt(final boolean entityCanSelfFundRenewal, final ExpiryMeta creationMeta) { + public ExpiryMeta resolveCreationAttempt( + final boolean entityCanSelfFundRenewal, @NonNull final ExpiryMeta creationMeta) { if (creationMeta.hasAutoRenewNum()) { validateAutoRenewAccount( creationMeta.autoRenewShard(), creationMeta.autoRenewRealm(), creationMeta.autoRenewNum()); @@ -96,8 +101,10 @@ public ExpiryMeta resolveCreationAttempt(final boolean entityCanSelfFundRenewal, /** * {@inheritDoc} */ + @NonNull @Override - public ExpiryMeta resolveUpdateAttempt(final ExpiryMeta currentMeta, final ExpiryMeta updateMeta) { + public ExpiryMeta resolveUpdateAttempt( + @NonNull final ExpiryMeta currentMeta, @NonNull final ExpiryMeta updateMeta) { if (updateMeta.hasAutoRenewNum()) { validateAutoRenewAccount( updateMeta.autoRenewShard(), updateMeta.autoRenewRealm(), updateMeta.autoRenewNum()); @@ -130,6 +137,7 @@ public ExpiryMeta resolveUpdateAttempt(final ExpiryMeta currentMeta, final Expir /** * {@inheritDoc} */ + @NonNull @Override public ResponseCodeEnum expirationStatus( @NonNull final EntityType entityType, @@ -159,7 +167,7 @@ public ResponseCodeEnum expirationStatus( * @return whether the entity has a complete auto-renew configuration */ private boolean hasCompleteAutoRenewSpec( - final boolean entityCanSelfFundRenewal, final ExpiryMeta creationMetadata) { + final boolean entityCanSelfFundRenewal, @NonNull final ExpiryMeta creationMetadata) { return creationMetadata.hasFullAutoRenewSpec() || (!creationMetadata.hasExplicitExpiry() && entityCanSelfFundRenewal); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/IngestChecker.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/IngestChecker.java index e67e81db74ba..fd5fb372ce9c 100644 --- 
a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/IngestChecker.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/IngestChecker.java @@ -16,35 +16,51 @@ package com.hedera.node.app.workflows.ingest; +import static com.hedera.hapi.node.base.ResponseCodeEnum.BUSY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_SIGNATURE; import static com.hedera.hapi.node.base.ResponseCodeEnum.PLATFORM_NOT_ACTIVE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.UNAUTHORIZED; import static com.swirlds.common.system.status.PlatformStatus.ACTIVE; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.HederaFunctionality; -import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.Transaction; import com.hedera.hapi.node.base.TransactionID; -import com.hedera.node.app.signature.SignaturePreparer; +import com.hedera.node.app.info.CurrentPlatformStatus; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.signature.ExpandedSignaturePair; +import com.hedera.node.app.signature.SignatureExpander; +import com.hedera.node.app.signature.SignatureVerifier; import com.hedera.node.app.solvency.SolvencyPreCheck; -import com.hedera.node.app.spi.info.CurrentPlatformStatus; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.state.HederaState; import com.hedera.node.app.throttle.ThrottleAccumulator; import com.hedera.node.app.workflows.TransactionChecker; import com.hedera.node.app.workflows.TransactionInfo; +import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.HashSet; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import javax.inject.Inject; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** * The {@code IngestChecker} contains checks that are specific to the ingest workflow */ -public class IngestChecker { +public final class IngestChecker { + private static final Logger logger = LogManager.getLogger(IngestChecker.class); + private final CurrentPlatformStatus currentPlatformStatus; private final TransactionChecker transactionChecker; private final ThrottleAccumulator throttleAccumulator; private final SolvencyPreCheck solvencyPreCheck; - private final SignaturePreparer signaturePreparer; + private final SignatureVerifier signatureVerifier; + private final SignatureExpander signatureExpander; /** * Constructor of the {@code IngestChecker} @@ -53,7 +69,8 @@ public class IngestChecker { * @param transactionChecker the {@link TransactionChecker} that pre-processes the bytes of a transaction * @param throttleAccumulator the {@link ThrottleAccumulator} for throttling * @param solvencyPreCheck the {@link SolvencyPreCheck} that checks payer balance - * @param signaturePreparer the {@link SignaturePreparer} that prepares signature data + * @param signatureExpander the {@link SignatureExpander} that expands signatures + * @param signatureVerifier the {@link SignatureVerifier} that verifies signature data * @throws NullPointerException if one of the arguments is {@code null} */ @Inject @@ -62,12 +79,14 @@ public IngestChecker( @NonNull final TransactionChecker transactionChecker, @NonNull final 
ThrottleAccumulator throttleAccumulator, @NonNull final SolvencyPreCheck solvencyPreCheck, - @NonNull final SignaturePreparer signaturePreparer) { + @NonNull final SignatureExpander signatureExpander, + @NonNull final SignatureVerifier signatureVerifier) { this.currentPlatformStatus = requireNonNull(currentPlatformStatus); this.transactionChecker = requireNonNull(transactionChecker); this.throttleAccumulator = requireNonNull(throttleAccumulator); this.solvencyPreCheck = solvencyPreCheck; - this.signaturePreparer = requireNonNull(signaturePreparer); + this.signatureVerifier = requireNonNull(signatureVerifier); + this.signatureExpander = requireNonNull(signatureExpander); } /** @@ -91,9 +110,9 @@ public void checkNodeState() throws PreCheckException { public TransactionInfo runAllChecks(@NonNull final HederaState state, @NonNull final Transaction tx) throws PreCheckException { // 1. Check the syntax - final var transactionInfo = transactionChecker.check(tx); - final var txBody = transactionInfo.txBody(); - final var functionality = transactionInfo.functionality(); + final var txInfo = transactionChecker.check(tx); + final var txBody = txInfo.txBody(); + final var functionality = txInfo.functionality(); // This should never happen, because HapiUtils#checkFunctionality() will throw // UnknownHederaFunctionality if it cannot map to a proper value, and WorkflowOnset @@ -104,21 +123,83 @@ public TransactionInfo runAllChecks(@NonNull final HederaState state, @NonNull f // TODO: Integrate solution from preHandle workflow once it is merged // 3. Check throttles - if (throttleAccumulator.shouldThrottle(transactionInfo.txBody())) { - throw new PreCheckException(ResponseCodeEnum.BUSY); + if (throttleAccumulator.shouldThrottle(txInfo.txBody())) { + throw new PreCheckException(BUSY); } // 4. Get payer account - final AccountID payerID = + final AccountID payerId = txBody.transactionIDOrElse(TransactionID.DEFAULT).accountIDOrElse(AccountID.DEFAULT); - solvencyPreCheck.checkPayerAccountStatus(state, payerID); + + solvencyPreCheck.checkPayerAccountStatus(state, payerId); // 5. Check account balance solvencyPreCheck.checkSolvencyOfVerifiedPayer(state, tx); // 6. Verify payer's signatures - signaturePreparer.syncGetPayerSigStatus(tx); + verifyPayerSignature(state, txInfo, payerId); + + return txInfo; + } + + private void verifyPayerSignature( + @NonNull final HederaState state, @NonNull final TransactionInfo txInfo, @NonNull final AccountID payerId) + throws PreCheckException { + + // Get the payer account + final var stores = new ReadableStoreFactory(state); + final var payerAccount = stores.getStore(ReadableAccountStore.class).getAccountById(payerId); + + // If there is no payer account for this ID, then the transaction is invalid + if (payerAccount == null) { + throw new PreCheckException(INVALID_ACCOUNT_ID); + } + + // There should, absolutely, be a key for this account. If there isn't, then something is wrong in + // state. So we will log this with a warning. We will also have to do something about the fact that + // the key is missing -- so we will fail with unauthorized. 
+ if (!payerAccount.hasKey()) { + // FUTURE: Have an alert and metric in our monitoring tools to make sure we are aware if this happens + logger.warn("Payer account {} has no key, indicating a problem with state", payerId); + throw new PreCheckException(UNAUTHORIZED); + } + + // Expand the signatures + final var expandedSigs = new HashSet(); + signatureExpander.expand( + payerAccount.keyOrThrow(), txInfo.signatureMap().sigPairOrThrow(), expandedSigs); - return transactionInfo; + // Verify the signatures + final var results = signatureVerifier.verify(txInfo.signedBytes(), expandedSigs); + final var future = results.get(payerAccount.keyOrThrow()); + + // This can happen if the signature map was missing a signature for the payer account. + if (future == null) { + throw new PreCheckException(INVALID_SIGNATURE); + } + + // Wait for the verification to complete. We have a timeout here of 1 second, which is WAY more time + // than it should take (maybe three orders of magnitude more time). Even if this happens spuriously + // (like, for example, if there was a really long GC pause), the worst case is the client will get an + // internal error and retry. We just want to log it. + try { + final var verificationResult = future.get(1, TimeUnit.SECONDS); + if (!verificationResult.passed()) { + throw new PreCheckException(INVALID_SIGNATURE); + } + } catch (TimeoutException e) { + // FUTURE: Have an alert and metric in our monitoring tools to make sure we are aware if this happens + logger.warn("Signature verification timed out during ingest"); + throw new RuntimeException(e); + } catch (ExecutionException e) { + // FUTURE: Have an alert and metric in our monitoring tools to make sure we are aware if this happens + logger.warn("Signature verification failed during ingest", e); + throw new RuntimeException(e); + } catch (InterruptedException e) { + // This might not be a warn / error situation, if we were interrupted, it means that someone + // is trying to shut down the server. So we can just throw and get out of here. 
+ Thread.currentThread().interrupt(); + throw new RuntimeException(e); + } } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/SubmissionManager.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/SubmissionManager.java index d83c6e27c0e9..3ffd2a027200 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/SubmissionManager.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/ingest/SubmissionManager.java @@ -21,11 +21,13 @@ import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.context.properties.NodeLocalProperties; import com.hedera.node.app.service.mono.context.properties.Profile; import com.hedera.node.app.service.mono.pbj.PbjConverter; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.state.DeduplicationCache; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.data.StatsConfig; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.SpeedometerMetric; @@ -85,23 +87,27 @@ public class SubmissionManager { * * @param platform the {@link Platform} to which transactions will be submitted * @param deduplicationCache used to prevent submission of duplicate transactions - * @param nodeLocalProperties the {@link NodeLocalProperties} that keep local properties + * @param configProvider the {@link ConfigProvider} * @param metrics metrics related to submissions */ @Inject public SubmissionManager( @NonNull final Platform platform, @NonNull final DeduplicationCache deduplicationCache, - @NonNull final NodeLocalProperties nodeLocalProperties, + @NonNull final ConfigProvider configProvider, @NonNull final Metrics metrics) { this.platform = requireNonNull(platform); this.submittedTxns = requireNonNull(deduplicationCache); - this.isProduction = requireNonNull(nodeLocalProperties).activeProfile() == Profile.PROD; + + final var hederaConfig = configProvider.getConfiguration().getConfigData(HederaConfig.class); + this.isProduction = Profile.valueOf(hederaConfig.activeProfile()) == Profile.PROD; + + final var statsConfig = configProvider.getConfiguration().getConfigData(StatsConfig.class); this.platformTxnRejections = metrics.getOrCreate(new SpeedometerMetric.Config("app", PLATFORM_TXN_REJECTIONS_NAME) .withDescription(PLATFORM_TXN_REJECTIONS_DESC) .withFormat(SPEEDOMETER_FORMAT) - .withHalfLife(nodeLocalProperties.statsSpeedometerHalfLifeSecs())); + .withHalfLife(statsConfig.speedometerHalfLifeSecs())); } /** diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java index 0d3d6b14d22c..36e8f8b98a6a 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java @@ -17,13 +17,12 @@ package com.hedera.node.app.workflows.prehandle; import static com.hedera.hapi.node.base.ResponseCodeEnum.UNKNOWN; -import static java.util.Collections.emptyList; +import static com.hedera.node.app.spi.signatures.SignatureVerification.failedVerification; import static java.util.Objects.requireNonNull; import static 

java.util.concurrent.CompletableFuture.completedFuture; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; -import com.hedera.hapi.node.base.KeyList; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.signature.SignatureVerificationFuture; import com.hedera.node.app.spi.signatures.SignatureVerification; @@ -31,8 +30,8 @@ import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.Future; /** @@ -44,10 +43,12 @@ * status is {@link Status#UNKNOWN_FAILURE}, then the payer will be null. If the status is * {@link Status#NODE_DUE_DILIGENCE_FAILURE}, then the payer will be the node account. In all other cases, * the payer is extracted from the transaction body. + * @param payerKey The cryptographic key of the payer. This will be {@code null} if the payer is {@code null}. * @param status {@link Status} of this pre-handle. Will always be set. * @param responseCode {@link ResponseCodeEnum} to the transaction as determined during pre-handle. Will always be set. * @param txInfo Information about the transaction that is being handled. If the transaction was not parseable, then * this will be null, and an appropriate error status will be set. + * @param requiredKeys The set of cryptographic keys that are required to be present. * @param verificationResults A map of {@link Future} yielding the * {@link SignatureVerificationFuture} for a given cryptographic key. Ony cryptographic keys * are used as the key of this map. @@ -60,6 +61,7 @@ public record PreHandleResult( @NonNull Status status, @NonNull ResponseCodeEnum responseCode, @Nullable TransactionInfo txInfo, + @Nullable Set requiredKeys, @Nullable Map verificationResults, @Nullable PreHandleResult innerResult, long configVersion) { @@ -93,86 +95,6 @@ public enum Status { requireNonNull(responseCode); } - /** - * Get a {@link Future} for the given key. - * - *
<p>
If the key is a cryptographic key (i.e. a basic key like ED25519 or ECDSA_SECP256K1), and the cryptographic - * key was in the signature map of the transaction, then a {@link Future} will be returned that will yield the - * {@link SignatureVerification} for that key. If there was no such cryptographic key in the signature map, then - * a completed, failed future is returned. - * - *
<p>
If the key is a key list, then a {@link Future} will be returned that aggregates the results of each key in - * the key list, possibly nested. - * - *
<p>
If the key is a threshold key, then a {@link Future} will be returned that aggregates the results of each key - * in the threshold key, possibly nested, based on the threshold for that key. - * - * @param key The key to check on the verification results for. - * @return A {@link Future} that will yield the {@link SignatureVerification} for the given key. - */ - @NonNull - public Future verificationFor(@NonNull final Key key) { - requireNonNull(key); - if (verificationResults == null) return failedVerificationFuture(key); - return switch (key.key().kind()) { - case ED25519, ECDSA_SECP256K1 -> { - final var result = verificationResults.get(key); - yield result == null ? failedVerificationFuture(key) : result; - } - case KEY_LIST -> { - final var keys = key.keyListOrThrow().keysOrElse(emptyList()); - yield verificationFor(key, keys, 0); - } - case THRESHOLD_KEY -> { - final var thresholdKey = key.thresholdKeyOrThrow(); - final var keyList = thresholdKey.keysOrElse(KeyList.DEFAULT); - final var keys = keyList.keysOrElse(emptyList()); - final var threshold = thresholdKey.threshold(); - final var clampedThreshold = Math.min(Math.max(1, threshold), keys.size()); - yield verificationFor(key, keys, keys.size() - clampedThreshold); - } - case CONTRACT_ID, DELEGATABLE_CONTRACT_ID, ECDSA_384, RSA_3072, UNSET -> failedVerificationFuture(key); - }; - } - - /** - * Utility method that converts the keys into a list of {@link Future} and then aggregates - * them into a single {@link Future}. - * - * @param key The key that is being verified. - * @param keys The sub-keys of the key being verified - * @param numCanFail The number of sub-keys that can fail verification before the key itself does - * @return A {@link Future} - */ - @NonNull - private Future verificationFor( - @NonNull final Key key, @NonNull final List keys, final int numCanFail) { - // If there are no keys, then we always fail. There must be at least one key in a key list or threshold key - // for it to be a valid key and to pass any form of verification. - if (keys.isEmpty() || numCanFail < 0) return failedVerificationFuture(key); - final var futures = keys.stream().map(this::verificationFor).toList(); - return new CompoundSignatureVerificationFuture(key, null, futures, numCanFail); - } - - /** - * Look for a {@link SignatureVerification} that applies to the given hollow account. - * @param evmAlias The evm alias to lookup verification for. - * @return The {@link SignatureVerification} for the given hollow account. - */ - @NonNull - public Future verificationFor(@NonNull final Bytes evmAlias) { - requireNonNull(evmAlias); - if (verificationResults != null && evmAlias.length() == 20) { - for (final var result : verificationResults.values()) { - final var account = result.evmAlias(); - if (account != null && evmAlias.matchesPrefix(account)) { - return result; - } - } - } - return failedVerificationFuture(evmAlias); - } - /** * Creates a new {@link PreHandleResult} in the event of a random failure that should not be automatically * charged to the node. Instead, during the handle phase, we will try again and charge the node if it fails again. 
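For reference, a tiny worked instance of the threshold arithmetic in the removed verificationFor above, with a hypothetical 2-of-3 threshold key plugged into the same formula:

final class ThresholdArithmeticSketch {
    public static void main(String[] args) {
        final int threshold = 2; // hypothetical 2-of-3 threshold key
        final int numKeys = 3;
        // clamp the threshold into [1, numKeys], exactly as the removed code does
        final int clampedThreshold = Math.min(Math.max(1, threshold), numKeys); // = 2
        // so at most one child verification may fail before the threshold key itself fails
        final int numCanFail = numKeys - clampedThreshold; // = 1
        System.out.println("clampedThreshold=" + clampedThreshold + ", numCanFail=" + numCanFail);
    }
}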
@@ -183,13 +105,14 @@ public Future verificationFor(@NonNull final Bytes evmAli */ @NonNull public static PreHandleResult unknownFailure() { - return new PreHandleResult(null, null, Status.UNKNOWN_FAILURE, UNKNOWN, null, null, null, UNKNOWN_VERSION); + return new PreHandleResult( + null, null, Status.UNKNOWN_FAILURE, UNKNOWN, null, null, null, null, UNKNOWN_VERSION); } /** - * Creates a new {@link PreHandleResult} in the event of node due diligence failure. The node itself will be - * charged for the transaction. If the {@link TransactionInfo} is not available because the failure happened while - * parsing the bytes, then it may be omitted as {@code null}. The {@link #status()} will be set to + * Creates a new {@link PreHandleResult} in the event of node due diligence failure. The node itself will be charged + * for the transaction. If the {@link TransactionInfo} is not available because the failure happened while parsing + * the bytes, then it may be omitted as {@code null}. The {@link #status()} will be set to * {@link Status#NODE_DUE_DILIGENCE_FAILURE}. * * @param node The node that is responsible for paying for this due diligence failure. @@ -203,7 +126,7 @@ public static PreHandleResult nodeDueDiligenceFailure( @NonNull final ResponseCodeEnum responseCode, @Nullable final TransactionInfo txInfo) { return new PreHandleResult( - node, null, Status.NODE_DUE_DILIGENCE_FAILURE, responseCode, txInfo, null, null, UNKNOWN_VERSION); + node, null, Status.NODE_DUE_DILIGENCE_FAILURE, responseCode, txInfo, null, null, null, UNKNOWN_VERSION); } /** @@ -213,8 +136,8 @@ public static PreHandleResult nodeDueDiligenceFailure( * @param responseCode The responseCode code of the failure. * @param txInfo The transaction info * @param verificationResults A map of {@link Future} yielding the - * {@link SignatureVerificationFuture} for a given cryptographic key. Ony cryptographic keys - * are used as the key of this map. + * {@link SignatureVerificationFuture} for a given cryptographic key. Only cryptographic keys are used as the key of + * this map. * @return A new {@link PreHandleResult} with the given parameters.
*/ @NonNull @@ -223,6 +146,7 @@ public static PreHandleResult preHandleFailure( @Nullable final Key payerKey, @NonNull final ResponseCodeEnum responseCode, @NonNull final TransactionInfo txInfo, + @Nullable Set requiredKeys, @Nullable Map verificationResults) { return new PreHandleResult( payer, @@ -230,6 +154,7 @@ public static PreHandleResult preHandleFailure( Status.PRE_HANDLE_FAILURE, responseCode, txInfo, + requiredKeys, verificationResults, null, UNKNOWN_VERSION); @@ -238,40 +163,12 @@ public static PreHandleResult preHandleFailure( /** Convenience method to create a SignatureVerification that failed */ @NonNull private static Future failedVerificationFuture(@NonNull final Key key) { - return completedFuture(new SignatureVerification() { - @NonNull - @Override - public Key key() { - return key; - } - - @Override - public boolean passed() { - return false; - } - }); + return completedFuture(failedVerification(key)); } /** Convenience method to create a SignatureVerification for a hollow account that failed */ @NonNull private static Future failedVerificationFuture(@NonNull final Bytes evmAlias) { - return completedFuture(new SignatureVerification() { - @Nullable - @Override - public Key key() { - return null; - } - - @NonNull - @Override - public Bytes evmAlias() { - return evmAlias; - } - - @Override - public boolean passed() { - return false; - } - }); + return completedFuture(failedVerification(evmAlias)); } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java index c48647e25221..933a65b349b7 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java @@ -48,6 +48,7 @@ import com.swirlds.common.system.transaction.Transaction; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.HashSet; +import java.util.Set; import java.util.stream.Stream; import javax.inject.Inject; import javax.inject.Singleton; @@ -169,7 +170,7 @@ public PreHandleResult preHandleTransaction( // If the payer account doesn't exist, then we cannot gather signatures for it, and will need to do // so later during the handle phase. Technically, we could still try to gather and verify the other // signatures, but that might be tricky and complicated with little gain. So just throw. - return preHandleFailure(creator, null, PAYER_ACCOUNT_NOT_FOUND, txInfo, null); + return preHandleFailure(creator, null, PAYER_ACCOUNT_NOT_FOUND, txInfo, null, null); } // Bootstrap the expanded signature pairs by grabbing all prefixes that are "full" keys already @@ -217,21 +218,27 @@ public PreHandleResult preHandleTransaction( // In that case, the payer will end up paying for the transaction. So we still need to do the signature // verifications that we have determined so far. final var results = signatureVerifier.verify(txInfo.signedBytes(), expanded); - return preHandleFailure(payer, payerKey, preCheck.responseCode(), txInfo, results); + return preHandleFailure(payer, payerKey, preCheck.responseCode(), txInfo, Set.of(), results); } // 5. 
Expand additional SignaturePairs based on gathered keys (we can safely ignore hollow accounts because we // already grabbed them when expanding the "full prefix" keys above) - final var nonPayerKeys = context.requiredNonPayerKeys(); - for (final var key : nonPayerKeys) { - signatureExpander.expand(key, originals, expanded); - } + signatureExpander.expand(context.requiredNonPayerKeys(), originals, expanded); + signatureExpander.expand(context.optionalNonPayerKeys(), originals, expanded); // 6. Submit the expanded SignaturePairs to the cryptography engine for verification final var results = signatureVerifier.verify(txInfo.signedBytes(), expanded); // 7. Create and return TransactionMetadata return new PreHandleResult( - payer, payerKey, SO_FAR_SO_GOOD, OK, txInfo, results, null, configuration.getVersion()); + payer, + payerKey, + SO_FAR_SO_GOOD, + OK, + txInfo, + context.requiredNonPayerKeys(), + results, + null, + configuration.getVersion()); } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryChecker.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryChecker.java index 9a1ab50d605a..c33f576866bd 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryChecker.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryChecker.java @@ -29,7 +29,6 @@ import com.hedera.node.app.fees.QueryFeeCheck; import com.hedera.node.app.service.token.impl.handlers.CryptoTransferHandler; import com.hedera.node.app.solvency.SolvencyPreCheck; -import com.hedera.node.app.spi.numbers.HederaAccountNumbers; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.workflows.TransactionInfo; import edu.umd.cs.findbugs.annotations.NonNull; @@ -41,7 +40,6 @@ @Singleton public class QueryChecker { - private final HederaAccountNumbers accountNumbers; private final QueryFeeCheck queryFeeCheck; private final Authorizer authorizer; private final CryptoTransferHandler cryptoTransferHandler; @@ -50,7 +48,6 @@ public class QueryChecker { /** * Constructor of {@code QueryChecker} * - * @param accountNumbers the {@link HederaAccountNumbers} that contains a list of special accounts * @param queryFeeCheck the {@link QueryFeeCheck} that checks if fees can be paid * @param authorizer the {@link Authorizer} that checks, if the caller is authorized * @param cryptoTransferHandler the {@link CryptoTransferHandler} that validates a contained @@ -60,12 +57,10 @@ public class QueryChecker { */ @Inject public QueryChecker( - @NonNull final HederaAccountNumbers accountNumbers, @NonNull final QueryFeeCheck queryFeeCheck, @NonNull final Authorizer authorizer, @NonNull final CryptoTransferHandler cryptoTransferHandler, @NonNull final SolvencyPreCheck solvencyPreCheck) { - this.accountNumbers = requireNonNull(accountNumbers); this.queryFeeCheck = requireNonNull(queryFeeCheck); this.authorizer = requireNonNull(authorizer); this.cryptoTransferHandler = requireNonNull(cryptoTransferHandler); @@ -111,7 +106,7 @@ public void validateAccountBalances( queryFeeCheck.validateQueryPaymentTransfers(txBody, fee); // A super-user cannot use an alias. Sorry, Clark Kent. 
- if (payer.hasAccountNum() && accountNumbers.isSuperuser(payer.accountNumOrThrow())) { + if (authorizer.isSuperUser(payer)) { return; } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryContextImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryContextImpl.java index cdd4cf14de7c..19854ad8f7bd 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryContextImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryContextImpl.java @@ -19,6 +19,7 @@ import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.transaction.Query; +import com.hedera.node.app.spi.records.RecordCache; import com.hedera.node.app.spi.workflows.QueryContext; import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; import com.swirlds.config.api.Configuration; @@ -32,6 +33,7 @@ public class QueryContextImpl implements QueryContext { private final ReadableStoreFactory storeFactory; private final Query query; private final Configuration configuration; + private final RecordCache recordCache; /** * Constructor of {@code QueryContextImpl}. @@ -44,10 +46,12 @@ public class QueryContextImpl implements QueryContext { public QueryContextImpl( @NonNull final ReadableStoreFactory storeFactory, @NonNull final Query query, - @NonNull final Configuration configuration) { + @NonNull final Configuration configuration, + @NonNull final RecordCache recordCache) { this.storeFactory = requireNonNull(storeFactory, "The supplied argument 'storeFactory' cannot be null!"); this.query = requireNonNull(query, "The supplied argument 'query' cannot be null!"); this.configuration = requireNonNull(configuration, "The supplied argument 'configuration' cannot be null!"); + this.recordCache = requireNonNull(recordCache, "The supplied argument 'recordCache' cannot be null!"); } @Override @@ -67,4 +71,10 @@ public C createStore(@NonNull Class storeInterface) { public Configuration configuration() { return configuration; } + + @NonNull + @Override + public RecordCache recordCache() { + return recordCache; + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowImpl.java index 34eb56c48298..ca46be958483 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowImpl.java @@ -38,6 +38,7 @@ import com.hedera.node.app.service.mono.pbj.PbjConverter; import com.hedera.node.app.spi.HapiUtils; import com.hedera.node.app.spi.UnknownHederaFunctionality; +import com.hedera.node.app.spi.records.RecordCache; import com.hedera.node.app.spi.workflows.InsufficientBalanceException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.state.HederaState; @@ -84,6 +85,7 @@ public final class QueryWorkflowImpl implements QueryWorkflow { private final FeeAccumulator feeAccumulator; private final Codec queryParser; private final ConfigProvider configProvider; + private final RecordCache recordCache; /** * Constructor of {@code QueryWorkflowImpl} @@ -107,7 +109,8 @@ public QueryWorkflowImpl( @NonNull final QueryDispatcher dispatcher, @NonNull final FeeAccumulator feeAccumulator, @NonNull final Codec queryParser, - @NonNull final ConfigProvider configProvider) { + @NonNull final 
ConfigProvider configProvider, + @NonNull final RecordCache recordCache) { this.stateAccessor = requireNonNull(stateAccessor); this.throttleAccumulator = requireNonNull(throttleAccumulator); this.submissionManager = requireNonNull(submissionManager); @@ -117,6 +120,7 @@ public QueryWorkflowImpl( this.feeAccumulator = requireNonNull(feeAccumulator); this.queryParser = requireNonNull(queryParser); this.configProvider = requireNonNull(configProvider); + this.recordCache = requireNonNull(recordCache); } @Override @@ -190,7 +194,8 @@ public void handleQuery(@NonNull final Bytes requestBuffer, @NonNull final Buffe } // 5. Check validity of query - final var context = new QueryContextImpl(storeFactory, query, configProvider.getConfiguration()); + final var context = + new QueryContextImpl(storeFactory, query, configProvider.getConfiguration(), recordCache); handler.validate(context); if (handler.needsAnswerOnlyCost(responseType)) { diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java index 8d46a6c311b4..41cc27a059b8 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java @@ -19,8 +19,6 @@ import com.hedera.hapi.node.base.ResponseType; import com.hedera.hapi.node.transaction.Query; import com.hedera.node.app.components.QueryInjectionComponent; -import com.hedera.node.app.fees.FeeAccumulator; -import com.hedera.node.app.fees.MonoFeeAccumulator; import com.hedera.node.app.service.consensus.impl.handlers.ConsensusHandlers; import com.hedera.node.app.service.contract.impl.handlers.ContractHandlers; import com.hedera.node.app.service.file.impl.handlers.FileHandlers; @@ -47,10 +45,6 @@ public interface QueryWorkflowInjectionModule { @Singleton QueryWorkflow bindQueryWorkflow(QueryWorkflowImpl queryWorkflow); - @Binds - @Singleton - FeeAccumulator bindFeeAccumulator(MonoFeeAccumulator feeAccumulator); - @Provides @Singleton @SuppressWarnings({"unchecked", "rawtypes"}) diff --git a/hedera-node/hedera-app/src/main/java/module-info.java b/hedera-node/hedera-app/src/main/java/module-info.java index 23d196bacf0f..bebb40c13855 100644 --- a/hedera-node/hedera-app/src/main/java/module-info.java +++ b/hedera-node/hedera-app/src/main/java/module-info.java @@ -73,4 +73,11 @@ exports com.hedera.node.app.workflows.handle.validation; exports com.hedera.node.app.state.recordcache to com.swirlds.common; + exports com.hedera.node.app.info to + com.hedera.node.app.test.fixtures, + com.swirlds.common; + exports com.hedera.node.app.services to + com.swirlds.platform; + exports com.hedera.node.app.signature to + com.hedera.node.app.test.fixtures; } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/AppTestBase.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/AppTestBase.java index a98c8a11e91f..44a4f714e901 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/AppTestBase.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/AppTestBase.java @@ -73,13 +73,13 @@ protected void setupStandardStates() { @NonNull @Override public ReadableStates createReadableStates(@NonNull String serviceName) { - return serviceName == TokenService.NAME ? writableStates : null; + return TokenService.NAME.equals(serviceName) ? 
writableStates : null; } @NonNull @Override public WritableStates createWritableStates(@NonNull String serviceName) { - return serviceName == TokenService.NAME ? writableStates : null; + return TokenService.NAME.equals(serviceName) ? writableStates : null; } }; } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/authorization/AuthorizerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/authorization/AuthorizerTest.java index 2b29f83169fc..3503c8c21a05 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/authorization/AuthorizerTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/authorization/AuthorizerTest.java @@ -17,29 +17,34 @@ package com.hedera.node.app.authorization; import static com.hedera.hapi.node.base.HederaFunctionality.CONSENSUS_CREATE_TOPIC; -import static com.hedera.hapi.node.base.ResponseCodeEnum.AUTHORIZATION_FAILED; -import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.HederaFunctionality; -import com.hedera.node.app.service.mono.context.domain.security.HapiOpPermissions; +import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.AccountsConfig; +import com.hedera.node.config.data.ApiPermissionConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; final class AuthorizerTest { - private HapiOpPermissions hapiOpPermissions; + private ConfigProvider configProvider; private AccountID accountID; private HederaFunctionality hapiFunction; @BeforeEach void setUp() { - hapiOpPermissions = mock(HapiOpPermissions.class); + configProvider = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(ApiPermissionConfig.class) + .withConfigDataType(AccountsConfig.class) + .getOrCreateConfig(), + 1); + accountID = AccountID.newBuilder().build(); hapiFunction = CONSENSUS_CREATE_TOPIC; } @@ -48,7 +53,7 @@ void setUp() { @DisplayName("Account ID is null throws") void accountIdIsNullThrows() { // given: - final var authorizer = new AuthorizerImpl(hapiOpPermissions); + final var authorizer = new AuthorizerImpl(configProvider); // expect: //noinspection DataFlowIssue @@ -59,7 +64,7 @@ void accountIdIsNullThrows() { @DisplayName("Hapi function is null throws") void hapiFunctionIsNullThrows() { // given: - final var authorizer = new AuthorizerImpl(hapiOpPermissions); + final var authorizer = new AuthorizerImpl(configProvider); // expect: //noinspection DataFlowIssue @@ -70,8 +75,16 @@ void hapiFunctionIsNullThrows() { @DisplayName("Account is not permitted") void accountIsNotPermitted() { // given: - final var authorizer = new AuthorizerImpl(hapiOpPermissions); - given(hapiOpPermissions.permissibilityOf2(any(), any())).willReturn(AUTHORIZATION_FAILED); + configProvider = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(ApiPermissionConfig.class) + .withConfigDataType(AccountsConfig.class) + .withValue("createTopic", "1-1000") + .getOrCreateConfig(), + 1); + + final var authorizer = new 
AuthorizerImpl(configProvider); + accountID = AccountID.newBuilder().accountNum(1234L).build(); // expect: final var authorized = authorizer.isAuthorized(accountID, hapiFunction); @@ -82,8 +95,16 @@ void accountIsNotPermitted() { @DisplayName("Account is permitted") void accountIsPermitted() { // given: - final var authorizer = new AuthorizerImpl(hapiOpPermissions); - given(hapiOpPermissions.permissibilityOf2(any(), any())).willReturn(OK); + configProvider = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(ApiPermissionConfig.class) + .withConfigDataType(AccountsConfig.class) + .withValue("createTopic", "1-1234") + .getOrCreateConfig(), + 1); + + final var authorizer = new AuthorizerImpl(configProvider); + accountID = AccountID.newBuilder().accountNum(1234L).build(); // expect: final var authorized = authorizer.isAuthorized(accountID, hapiFunction); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java index 3e15297fc742..9818e71b05e3 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java @@ -24,6 +24,7 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.node.app.DaggerHederaInjectionComponent; import com.hedera.node.app.HederaInjectionComponent; +import com.hedera.node.app.config.ConfigProviderImpl; import com.hedera.node.app.service.mono.context.properties.BootstrapProperties; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.swirlds.common.context.PlatformContext; @@ -33,8 +34,9 @@ import com.swirlds.common.system.InitTrigger; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Platform; +import com.swirlds.common.system.status.PlatformStatus; import com.swirlds.config.api.Configuration; -import com.swirlds.platform.gui.SwirldsGui; +import java.time.InstantSource; import java.util.Set; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -59,22 +61,21 @@ void setUp() { when(platformContext.getConfiguration()).thenReturn(configuration); when(platform.getContext()).thenReturn(platformContext); - given(platformContext.getCryptography()).willReturn(cryptography); - final var selfNodeId = new NodeId(666L); app = DaggerHederaInjectionComponent.builder() .initTrigger(InitTrigger.GENESIS) .platform(platform) .crypto(CryptographyHolder.get()) - .consoleCreator(SwirldsGui::createConsole) .staticAccountMemo("memo") .bootstrapProps(new BootstrapProperties()) + .configuration(new ConfigProviderImpl(false)) .selfId(AccountID.newBuilder().accountNum(selfNodeId.id() + 3).build()) .initialHash(new Hash()) .maxSignedTxnSize(1024) - .genesisUsage(false) + .currentPlatformStatus(() -> PlatformStatus.ACTIVE) .servicesRegistry(Set::of) + .instantSource(InstantSource.system()) .build(); } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/config/ConfigProviderImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/config/ConfigProviderImplTest.java index 182c63ed4291..1047c099976b 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/config/ConfigProviderImplTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/config/ConfigProviderImplTest.java @@ -32,12 +32,6 @@ @ExtendWith(SystemStubsExtension.class) class 
ConfigProviderImplTest { - @Test - void testNullConfig() { - // then - assertThatThrownBy(() -> new ConfigProviderImpl(null)).isInstanceOf(NullPointerException.class); - } - @ParameterizedTest @ValueSource(booleans = {true, false}) void testInitialConfig(final boolean isGenesis) { diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/fees/MonoFeeAccumulatorTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/fees/MonoFeeAccumulatorTest.java deleted file mode 100644 index 1590a0cad48b..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/fees/MonoFeeAccumulatorTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.fees; - -import static com.hederahashgraph.api.proto.java.HederaFunctionality.ConsensusGetTopicInfo; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.BDDMockito.given; - -import com.hedera.hapi.node.base.HederaFunctionality; -import com.hedera.hapi.node.base.Timestamp; -import com.hedera.hapi.node.transaction.Query; -import com.hedera.node.app.hapi.utils.fee.FeeObject; -import com.hedera.node.app.service.consensus.ReadableTopicStore; -import com.hedera.node.app.service.mono.context.primitives.StateView; -import com.hedera.node.app.service.mono.fees.calculation.UsageBasedFeeCalculator; -import com.hedera.node.app.service.mono.fees.calculation.UsagePricesProvider; -import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; -import com.hederahashgraph.api.proto.java.FeeData; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class MonoFeeAccumulatorTest { - private final Query mockQuery = Query.newBuilder().build(); - private final FeeData mockUsage = FeeData.getDefaultInstance(); - private final FeeData mockPrices = FeeData.getDefaultInstance(); - private final com.hederahashgraph.api.proto.java.Timestamp mockTime = - PbjConverter.fromPbj(Timestamp.newBuilder().seconds(1_234_567L).build()); - - @Mock - private UsageBasedFeeCalculator usageBasedFeeCalculator; - - @Mock - private UsagePricesProvider usagePricesProvider; - - @Mock - private StateView stateView; - - @Mock - private MonoGetTopicInfoUsage getTopicInfoUsage; - - @Mock - private ReadableStoreFactory readableStoreFactory; - - @Mock - private ReadableTopicStore readableTopicStore; - - private MonoFeeAccumulator subject; - - @BeforeEach - void setUp() { - subject = new MonoFeeAccumulator( - usageBasedFeeCalculator, getTopicInfoUsage, usagePricesProvider, () -> stateView); - } - - @Test - void usesMonoAdapterDirectlyForGetTopicInfo() { - final var 
expectedFees = new FeeObject(100L, 0L, 100L); - given(usagePricesProvider.defaultPricesGiven(ConsensusGetTopicInfo, mockTime)) - .willReturn(mockPrices); - given(readableStoreFactory.getStore(ReadableTopicStore.class)).willReturn(readableTopicStore); - given(getTopicInfoUsage.computeUsage(PbjConverter.fromPbj(mockQuery), readableTopicStore)) - .willReturn(mockUsage); - given(usageBasedFeeCalculator.computeFromQueryResourceUsage(mockUsage, mockUsage, mockTime)) - .willReturn(expectedFees); - - final var actualFees = subject.computePayment( - readableStoreFactory, - HederaFunctionality.CONSENSUS_GET_TOPIC_INFO, - mockQuery, - PbjConverter.toPbj(mockTime)); - - assertSame(expectedFees, actualFees); - } - - @Test - void delegatedComputePaymentForQuery() { - final var queryFunction = HederaFunctionality.CONSENSUS_GET_TOPIC_INFO; - final var expectedFee = new FeeObject(100L, 0L, 100L); - - given(usagePricesProvider.defaultPricesGiven(eq(ConsensusGetTopicInfo), any())) - .willReturn(mockPrices); - given(readableStoreFactory.getStore(ReadableTopicStore.class)).willReturn(readableTopicStore); - given(getTopicInfoUsage.computeUsage(any(), eq(readableTopicStore))).willReturn(mockUsage); - given(usageBasedFeeCalculator.computeFromQueryResourceUsage(eq(mockUsage), eq(mockPrices), any())) - .willReturn(expectedFee); - - final var fee = - subject.computePayment(readableStoreFactory, queryFunction, mockQuery, PbjConverter.toPbj(mockTime)); - - assertSame(expectedFee, fee); - } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/fees/MonoGetTopicInfoUsageTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/fees/MonoGetTopicInfoUsageTest.java deleted file mode 100644 index 556bcbd9c18b..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/fees/MonoGetTopicInfoUsageTest.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.fees; - -import static com.hedera.node.app.service.mono.Utils.asHederaKey; -import static com.hedera.node.app.service.mono.pbj.PbjConverter.toProtoQuery; -import static com.hedera.test.utils.IdUtils.asAccount; -import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.BDDMockito.given; - -import com.hedera.hapi.node.base.QueryHeader; -import com.hedera.hapi.node.base.TopicID; -import com.hedera.hapi.node.consensus.ConsensusGetTopicInfoQuery; -import com.hedera.hapi.node.state.consensus.Topic; -import com.hedera.node.app.service.consensus.ReadableTopicStore; -import com.hedera.node.app.service.mono.fees.calculation.consensus.queries.GetTopicInfoResourceUsage; -import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; -import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.service.mono.state.merkle.MerkleTopic; -import com.hedera.node.app.service.mono.state.submerkle.EntityId; -import com.hedera.node.app.service.mono.state.submerkle.RichInstant; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.pbj.runtime.io.buffer.Bytes; -import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.FeeData; -import com.hederahashgraph.api.proto.java.Key; -import com.hederahashgraph.api.proto.java.ResponseType; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class MonoGetTopicInfoUsageTest { - @Mock - private GetTopicInfoResourceUsage delegate; - - @Mock - private ReadableTopicStore topicStore; - - private final FeeData mockUsage = FeeData.getDefaultInstance(); - private final AccountID autoRenewId = asAccount("0.0.4"); - private final byte[] runningHash = "runningHash".getBytes(); - - private final Key key = PbjConverter.fromPbj(A_COMPLEX_KEY); - private final EntityNum topicEntityNum = EntityNum.fromLong(1L); - private final TopicID topicId = - TopicID.newBuilder().topicNum(topicEntityNum.longValue()).build(); - private final String memo = "test memo"; - private final long expirationTime = 1_234_567L; - private final long sequenceNumber = 1L; - private final long autoRenewSecs = 100L; - private final boolean deleted = true; - - private final Topic topic = new Topic( - topicId, - sequenceNumber, - expirationTime, - autoRenewSecs, - autoRenewId.getAccountNum(), - deleted, - Bytes.wrap(runningHash), - memo, - PbjConverter.fromGrpcKey(key), - PbjConverter.fromGrpcKey(key)); - - private final MerkleTopic adapterTopic = new MerkleTopic( - memo, - (JKey) asHederaKey(key).orElseThrow(), - (JKey) asHederaKey(key).orElseThrow(), - autoRenewSecs, - new EntityId(0, 0, autoRenewId.getAccountNum()), - new RichInstant(expirationTime, 0)); - - { - adapterTopic.setRunningHash(runningHash); - adapterTopic.setSequenceNumber(sequenceNumber); - adapterTopic.setDeleted(deleted); - } - - private MonoGetTopicInfoUsage subject; - - @BeforeEach - void setUp() { - subject = new MonoGetTopicInfoUsage(delegate); - } - - @Test - void usesDelegateWithAdaptedMerkleTopic() { - final var query = com.hedera.hapi.node.transaction.Query.newBuilder() - .consensusGetTopicInfo(ConsensusGetTopicInfoQuery.newBuilder() - .header(QueryHeader.newBuilder() - 
.responseType(com.hedera.hapi.node.base.ResponseType.ANSWER_STATE_PROOF)) - .topicID(topicId)) - .build(); - given(topicStore.getTopicLeaf(topicId)).willReturn(Optional.of(topic)); - given(delegate.usageGivenTypeAndTopic(adapterTopic, ResponseType.ANSWER_STATE_PROOF)) - .willReturn(mockUsage); - - final var usage = subject.computeUsage(toProtoQuery(query), topicStore); - - assertSame(mockUsage, usage); - } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/signature/impl/SignatureVerifierImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/signature/impl/SignatureVerifierImplTest.java index 7a88a20ab2d1..013486969fb0 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/signature/impl/SignatureVerifierImplTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/signature/impl/SignatureVerifierImplTest.java @@ -16,6 +16,9 @@ package com.hedera.node.app.signature.impl; +import static com.hedera.node.app.fixtures.signature.ExpandedSignaturePairFactory.ecdsaPair; +import static com.hedera.node.app.fixtures.signature.ExpandedSignaturePairFactory.ed25519Pair; +import static com.hedera.node.app.fixtures.signature.ExpandedSignaturePairFactory.hollowPair; import static java.util.Collections.emptySet; import static java.util.concurrent.CompletableFuture.completedFuture; import static org.assertj.core.api.Assertions.assertThat; @@ -24,9 +27,6 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; -import com.hedera.hapi.node.base.Key; -import com.hedera.hapi.node.base.SignaturePair; -import com.hedera.hapi.node.state.token.Account; import com.hedera.node.app.AppTestBase; import com.hedera.node.app.service.mono.sigs.utils.MiscCryptoUtils; import com.hedera.node.app.signature.ExpandedSignaturePair; @@ -36,7 +36,6 @@ import com.swirlds.common.crypto.Cryptography; import com.swirlds.common.crypto.TransactionSignature; import com.swirlds.common.crypto.VerificationStatus; -import edu.umd.cs.findbugs.annotations.NonNull; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; @@ -181,39 +180,4 @@ void cryptoEngineIsGivenAllTheData() { .isTrue(); } } - - /** Simple utility to create an ECDSA_SECP256K1 expanded signature */ - private ExpandedSignaturePair ecdsaPair(final Key key) { - final var compressed = key.ecdsaSecp256k1OrThrow(); - final var array = new byte[(int) compressed.length()]; - compressed.getBytes(0, array); - final var decompressed = MiscCryptoUtils.decompressSecp256k1(array); - final var sigPair = SignaturePair.newBuilder() - .pubKeyPrefix(key.ecdsaSecp256k1OrThrow()) - .ecdsaSecp256k1(key.ecdsaSecp256k1OrThrow()) - .build(); - return new ExpandedSignaturePair(key, Bytes.wrap(decompressed), null, sigPair); - } - - /** Simple utility to create an ED25519 expanded signature */ - private ExpandedSignaturePair ed25519Pair(final Key key) { - final var sigPair = SignaturePair.newBuilder() - .pubKeyPrefix(key.ed25519OrThrow()) - .ed25519(key.ed25519OrThrow()) - .build(); - return new ExpandedSignaturePair(key, key.ed25519OrThrow(), null, sigPair); - } - - /** Simple utility to create an ECDSA_SECP256K1 hollow account based expanded signature */ - private ExpandedSignaturePair hollowPair(final Key key, @NonNull final Account hollowAccount) { - final var compressed = key.ecdsaSecp256k1OrThrow(); - final var array = new byte[(int) compressed.length()]; - compressed.getBytes(0, array); - final var decompressed = MiscCryptoUtils.decompressSecp256k1(array); - final var 
sigPair = SignaturePair.newBuilder() - .pubKeyPrefix(key.ecdsaSecp256k1OrThrow()) - .ecdsaSecp256k1(key.ecdsaSecp256k1OrThrow()) - .build(); - return new ExpandedSignaturePair(key, Bytes.wrap(decompressed), hollowAccount.alias(), sigPair); - } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/ResponsibleVMapUser.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/ResponsibleVMapUser.java deleted file mode 100644 index bc8824f3f4a2..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/ResponsibleVMapUser.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.state.merkle; - -import com.hedera.node.app.service.mono.state.adapters.VirtualMapLike; -import com.swirlds.virtualmap.VirtualKey; -import com.swirlds.virtualmap.VirtualMap; -import com.swirlds.virtualmap.VirtualValue; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import org.junit.jupiter.api.AfterEach; - -public abstract class ResponsibleVMapUser { - private static final AtomicInteger numReleased = new AtomicInteger(); - private final List statesToRelease = new ArrayList<>(); - private final List> mapsToRelease = new ArrayList<>(); - - protected VirtualMap trackedMap( - @Nullable final VirtualMap map) { - if (map != null) { - mapsToRelease.add(map); - } - return map; - } - - protected MerkleHederaState tracked(@Nullable final MerkleHederaState state) { - if (state != null) { - statesToRelease.add(state); - } - - return state; - } - - @AfterEach - @SuppressWarnings({"unchecked", "rawtypes"}) - void releaseTracked() throws IOException { - for (final var map : mapsToRelease) { - release(VirtualMapLike.from((VirtualMap) map)); - } - for (final var state : statesToRelease) { - release(state); - } - } - - private void release(@NonNull final MerkleHederaState state) throws IOException { - release(state.getStateChildrenProvider(null).storage()); - release(state.getStateChildrenProvider(null).contractStorage()); - - final var accounts = state.getStateChildrenProvider(null).accounts(); - if (accounts != null && accounts.areOnDisk()) { - release(accounts.getOnDiskAccounts()); - } - final var tokenRels = state.getStateChildrenProvider(null).tokenAssociations(); - if (tokenRels != null && tokenRels.areOnDisk()) { - release(tokenRels.getOnDiskRels()); - } - final var nfts = state.getStateChildrenProvider(null).uniqueTokens(); - if (nfts != null && nfts.isVirtual()) { - release(nfts.getOnDiskNfts()); - } - } - - private void release(@Nullable final VirtualMapLike map) throws IOException { - if (map != null) { - if (map.toString().contains("Mock")) { - System.out.println("Skipping mock " + map); - return; - } - map.release(); - 
System.out.println("Released #" + numReleased.incrementAndGet()); - try { - map.getDataSource().close(); - } catch (final NullPointerException ignore) { - // A few tests use the VirtualMap default constructor, which doesn't initialize root - } - } - } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/StateLifecyclesTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/StateLifecyclesTest.java deleted file mode 100644 index e9fd2e9df9eb..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/StateLifecyclesTest.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.state.merkle; - -import static com.hedera.node.app.Hedera.MAX_SIGNED_TXN_SIZE; -import static com.hedera.node.app.service.mono.ServicesState.EMPTY_HASH; -import static com.hedera.node.app.service.mono.context.AppsManager.APPS; -import static com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.google.common.primitives.Ints; -import com.google.common.primitives.Longs; -import com.hedera.hapi.node.base.AccountID; -import com.hedera.node.app.DaggerHederaInjectionComponent; -import com.hedera.node.app.HederaInjectionComponent; -import com.hedera.node.app.service.mono.context.properties.BootstrapProperties; -import com.hedera.node.app.service.mono.state.migration.StateChildIndices; -import com.hedera.node.app.service.mono.stream.RecordsRunningHashLeaf; -import com.swirlds.common.config.singleton.ConfigurationHolder; -import com.swirlds.common.context.DefaultPlatformContext; -import com.swirlds.common.context.PlatformContext; -import com.swirlds.common.crypto.CryptographyHolder; -import com.swirlds.common.crypto.Hash; -import com.swirlds.common.crypto.RunningHash; -import com.swirlds.common.crypto.SerializablePublicKey; -import com.swirlds.common.crypto.engine.CryptoEngine; -import com.swirlds.common.metrics.noop.NoOpMetrics; -import com.swirlds.common.system.InitTrigger; -import com.swirlds.common.system.NodeId; -import com.swirlds.common.system.Platform; -import com.swirlds.common.system.address.Address; -import com.swirlds.common.system.address.AddressBook; -import com.swirlds.platform.state.DualStateImpl; -import com.swirlds.platform.state.signed.SignedState; -import com.swirlds.platform.state.signed.SignedStateFileReader; -import java.io.IOException; -import java.nio.file.Paths; -import java.security.PublicKey; -import java.util.List; -import java.util.concurrent.atomic.AtomicReference; -import org.junit.jupiter.api.Assertions; -import 
org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class StateLifecyclesTest extends ResponsibleVMapUser { - private final String signedStateDir = "src/test/resources/signedState/"; - - @Mock - private AddressBook addressBook; - - @Test - void noOpConstructorExists() { - assertDoesNotThrow(() -> new MerkleHederaState()); - } - - @Test - @Disabled("Apparently this test is no longer testing anything meaningful.") - void testGenesisState() { - ClassLoaderHelper.loadClassPathDependencies(); - - final var merkleState = tracked(new MerkleHederaState((t, s) -> {}, (e, m, p) -> {}, (s, p, ds, t, v) -> {})); - - final var platform = createMockPlatformWithCrypto(); - final var recordsRunningHashLeaf = new RecordsRunningHashLeaf(); - recordsRunningHashLeaf.setRunningHash(new RunningHash(EMPTY_HASH)); - merkleState.setChild(StateChildIndices.RECORD_STREAM_RUNNING_HASH, recordsRunningHashLeaf); - final var app = createApp(platform); - - APPS.save(platform.getSelfId(), app); - - assertDoesNotThrow(() -> merkleState.init(platform, new DualStateImpl(), InitTrigger.GENESIS, null)); - } - - @Test - void testLoadingMHState() { - ClassLoaderHelper.loadClassPathDependencies(); - final AtomicReference ref = new AtomicReference<>(); - assertThrows( - com.swirlds.common.io.exceptions.ClassNotFoundException.class, - () -> ref.set(loadSignedState(signedStateDir + "MHS/SignedState.swh"))); - - // TODO - continue as below after fixing ClassNotFoundException, which - // should satisfy Sonar's coverage requirements for MerkleHederaState - /* - final var mockPlatform = createMockPlatformWithCrypto(); - given(mockPlatform.getAddressBook()).willReturn(addressBook); - final var mhs = (MerkleHederaState) ref.get().getSwirldState(); - tracked(mhs).init(mockPlatform, new DualStateImpl(), RESTART, forHapiAndHedera("0.30.0", "0.30.5")); - */ - } - - private Platform createMockPlatformWithCrypto() { - final var platform = mock(Platform.class); - when(platform.getSelfId()).thenReturn(new NodeId(0)); - when(platform.getContext().getCryptography()) - .thenReturn(new CryptoEngine(getStaticThreadManager(), CryptoConfigUtils.MINIMAL_CRYPTO_CONFIG)); - assertNotNull(platform.getContext().getCryptography()); - return platform; - } - - private AddressBook createPretendBookFrom(final Platform platform, final boolean withKeyDetails) { - final var pubKey = mock(PublicKey.class); - given(pubKey.getAlgorithm()).willReturn("EC"); - if (withKeyDetails) { - given(pubKey.getEncoded()).willReturn(Longs.toByteArray(Long.MAX_VALUE)); - } - final var node = platform.getSelfId(); - final var address = new Address( - node, - "", - "", - 1L, - false, - null, - -1, - Ints.toByteArray(123456789), - -1, - null, - -1, - null, - -1, - new SerializablePublicKey(pubKey), - null, - new SerializablePublicKey(pubKey), - ""); - return new AddressBook(List.of(address)); - } - - private static HederaInjectionComponent createApp(final Platform platform) { - return DaggerHederaInjectionComponent.builder() - .initTrigger(InitTrigger.GENESIS) - .initialHash(new Hash()) - .platform(platform) - .crypto(CryptographyHolder.get()) - .consoleCreator((ignore, visible) -> null) - .selfId(AccountID.newBuilder() - .accountNum(platform.getSelfId().id() + 3) - .build()) - .staticAccountMemo("memo") - .maxSignedTxnSize(MAX_SIGNED_TXN_SIZE) - .bootstrapProps(new BootstrapProperties()) - .build(); - } 
- - private static SignedState loadSignedState(final String path) throws IOException { - final PlatformContext platformContext = new DefaultPlatformContext( - ConfigurationHolder.getInstance().get(), new NoOpMetrics(), CryptographyHolder.get()); - final var signedPair = SignedStateFileReader.readStateFile(platformContext, Paths.get(path)); - // Because it's possible we are loading old data, we cannot check equivalence of the hash. - try (var reservedSignedState = signedPair.reservedSignedState()) { - Assertions.assertNotNull(reservedSignedState.get()); - return reservedSignedState.get(); - } - } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheServiceTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheServiceTest.java index 8cb18e51f9a6..0ee4eb8865f5 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheServiceTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheServiceTest.java @@ -46,8 +46,7 @@ void schema(@Mock final SchemaRegistry registry) { svc.registerSchemas(registry); verify(registry).register(captor.capture()); final var schema = captor.getValue(); - assertThat(schema.getVersion()) - .isEqualTo(SemanticVersion.newBuilder().minor(38).build()); + assertThat(schema.getVersion()).isEqualTo(SemanticVersion.newBuilder().build()); assertThat(schema.statesToCreate()).hasSize(1); } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java index 9997cc31be62..8920c8cd02bb 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java @@ -38,9 +38,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; -import static org.mockito.Mockito.when; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Duration; @@ -55,10 +53,13 @@ import com.hedera.hapi.node.transaction.SignedTransaction; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.AppTestBase; -import com.hedera.node.app.service.mono.context.properties.GlobalDynamicProperties; +import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.spi.HapiUtils; import com.hedera.node.app.spi.UnknownHederaFunctionality; import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.Codec; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; @@ -87,7 +88,7 @@ final class TransactionCheckerTest extends AppTestBase { /** The standard {@link TransactionBody#transactionValidDuration()} for most tests */ private static final Duration ONE_MINUTE = Duration.newBuilder().seconds(60).build(); - private GlobalDynamicProperties props; + private ConfigProvider props; private Transaction tx; private SignatureMap signatureMap; @@ -165,11 +166,15 @@ void setup() { inputBuffer 
= Bytes.wrap(asByteArray(tx)); // Set up the properties - props = mock(GlobalDynamicProperties.class); - when(props.maxMemoUtf8Bytes()).thenReturn(MAX_MEMO_SIZE); - when(props.minTxnDuration()).thenReturn(MIN_DURATION); - when(props.maxTxnDuration()).thenReturn(MAX_DURATION); - when(props.minValidityBuffer()).thenReturn(MIN_VALIDITY_BUFFER); + props = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(HederaConfig.class) + .withValue("hedera.transaction.maxMemoUtf8Bytes", MAX_MEMO_SIZE) + .withValue("hedera.transaction.minValidityBufferSecs", MIN_VALIDITY_BUFFER) + .withValue("hedera.transaction.minValidDuration", MIN_DURATION) + .withValue("hedera.transaction.maxValidDuration", MAX_DURATION) + .getOrCreateConfig(), + 1); // And create the checker itself checker = new TransactionChecker(MAX_TX_SIZE, nodeSelfAccountId, props, metrics); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoFeeCalculatorTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoFeeCalculatorTest.java deleted file mode 100644 index 30df0d84015b..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoFeeCalculatorTest.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.workflows.handle; - -import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; -import static com.hedera.test.utils.KeyUtils.B_COMPLEX_KEY; -import static com.hederahashgraph.api.proto.java.HederaFunctionality.ConsensusUpdateTopic; -import static com.hederahashgraph.api.proto.java.HederaFunctionality.ContractCall; -import static com.hederahashgraph.api.proto.java.ResponseType.ANSWER_ONLY; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.verify; - -import com.hedera.hapi.node.base.TopicID; -import com.hedera.hapi.node.state.consensus.Topic; -import com.hedera.node.app.fees.AdaptedMonoFeeCalculator; -import com.hedera.node.app.fees.MonoGetTopicInfoUsage; -import com.hedera.node.app.hapi.utils.exception.InvalidTxBodyException; -import com.hedera.node.app.hapi.utils.fee.FeeObject; -import com.hedera.node.app.hapi.utils.fee.SigValueObj; -import com.hedera.node.app.service.consensus.ConsensusService; -import com.hedera.node.app.service.consensus.impl.ConsensusServiceImpl; -import com.hedera.node.app.service.mono.context.primitives.StateView; -import com.hedera.node.app.service.mono.fees.HbarCentExchange; -import com.hedera.node.app.service.mono.fees.calculation.RenewAssessment; -import com.hedera.node.app.service.mono.fees.calculation.UsageBasedFeeCalculator; -import com.hedera.node.app.service.mono.fees.calculation.UsagePricesProvider; -import com.hedera.node.app.service.mono.fees.calculation.consensus.txns.UpdateTopicResourceUsage; -import com.hedera.node.app.service.mono.legacy.core.jproto.JEd25519Key; -import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; -import com.hedera.node.app.service.mono.state.migration.HederaAccount; -import com.hedera.node.app.service.mono.utils.accessors.TxnAccessor; -import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; -import com.hedera.node.app.spi.state.ReadableStates; -import com.hedera.node.app.state.HederaState; -import com.hedera.node.app.state.WorkingStateAccessor; -import com.hedera.pbj.runtime.io.buffer.Bytes; -import com.hederahashgraph.api.proto.java.ConsensusUpdateTopicTransactionBody; -import com.hederahashgraph.api.proto.java.ExchangeRate; -import com.hederahashgraph.api.proto.java.FeeData; -import com.hederahashgraph.api.proto.java.Query; -import com.hederahashgraph.api.proto.java.SubType; -import com.hederahashgraph.api.proto.java.Timestamp; -import com.hederahashgraph.api.proto.java.TransactionBody; -import com.swirlds.common.utility.AutoCloseableWrapper; -import java.time.Instant; -import java.util.Collections; -import java.util.Map; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class AdaptedMonoFeeCalculatorTest { - private static final JKey PAYER_KEY = new JEd25519Key("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".getBytes()); - private static final Query MOCK_QUERY = Query.getDefaultInstance(); - private static final Instant NOW = Instant.ofEpochSecond(1_234_567L); - private static final Timestamp AT = - Timestamp.newBuilder().setSeconds(1_234_567).build(); - private static final FeeData MOCK_USAGE = FeeData.getDefaultInstance(); - private static final FeeData MOCK_PRICES = FeeData.getDefaultInstance(); - private static final FeeObject MOCK_FEES = new FeeObject(1, 2, 3); - 
private static final SigValueObj MOCK_SIG_USAGE = new SigValueObj(1, 2, 3); - private static final ExchangeRate MOCK_RATE = ExchangeRate.getDefaultInstance(); - private static final Map TYPED_PRICES = Map.of(SubType.DEFAULT, FeeData.getDefaultInstance()); - - private static final Topic MOCK_TOPIC = new Topic( - TopicID.newBuilder().topicNum(1L).build(), - 2L, - 3L, - 4L, - 5L, - true, - Bytes.wrap("MOCK_RUNNING_HASH".getBytes()), - "MOCK_MEMO", - A_COMPLEX_KEY, - B_COMPLEX_KEY); - - private static final TransactionBody MOCK_TXN = TransactionBody.newBuilder() - .setConsensusUpdateTopic(ConsensusUpdateTopicTransactionBody.newBuilder() - .setTopicID(com.hederahashgraph.api.proto.java.TopicID.newBuilder() - .setTopicNum(MOCK_TOPIC.id().topicNum()) - .build())) - .build(); - - @Mock - private HederaState state; - - @Mock - private HederaAccount account; - - @Mock - private ReadableStates readableStates; - - @Mock - private StateView view; - - @Mock - private TxnAccessor accessor; - - @Mock - private HbarCentExchange exchange; - - @Mock - private UsagePricesProvider usagePrices; - - @Mock - private UsageBasedFeeCalculator monoFeeCalculator; - - @Mock - private WorkingStateAccessor workingStateAccessor; - - @Mock - private UpdateTopicResourceUsage monoUpdateTopicUsage; - - @Mock - private Supplier> stateAccessor; - - @Mock - private AutoCloseableWrapper wrapper; - - private AdaptedMonoFeeCalculator subject; - - @BeforeEach - void setUp() { - subject = new AdaptedMonoFeeCalculator( - exchange, usagePrices, monoFeeCalculator, workingStateAccessor, monoUpdateTopicUsage, stateAccessor); - } - - @Test - void delegatesInit() { - subject.init(); - - verify(monoFeeCalculator).init(); - } - - @Test - void delegatesEstimatedGasPrice() { - final var function = ContractCall; - final var at = Timestamp.getDefaultInstance(); - given(monoFeeCalculator.estimatedGasPriceInTinybars(ContractCall, at)).willReturn(1L); - - assertEquals(1L, subject.estimatedGasPriceInTinybars(function, at)); - } - - @Test - void delegatesEstimatedNonFeeAdjust() { - final var at = Timestamp.getDefaultInstance(); - given(monoFeeCalculator.estimatedNonFeePayerAdjustments(accessor, at)).willReturn(1L); - - assertEquals(1L, subject.estimatedNonFeePayerAdjustments(accessor, at)); - } - - @Test - void delegatesNonTopicUpdateComputeFee() { - final var payerKey = new JEd25519Key("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".getBytes()); - final var at = Instant.ofEpochSecond(1_234_567L); - given(monoFeeCalculator.computeFee(accessor, payerKey, view, at)).willReturn(MOCK_FEES); - - assertSame(MOCK_FEES, subject.computeFee(accessor, payerKey, view, at)); - } - - @Test - void computesUpdateTopicFeeViaWorkingStates() throws InvalidTxBodyException { - given(accessor.getTxn()).willReturn(MOCK_TXN); - given(accessor.getFunction()).willReturn(ConsensusUpdateTopic); - given(accessor.getSubType()).willReturn(SubType.DEFAULT); - - given(workingStateAccessor.getHederaState()).willReturn(state); - given(state.createReadableStates(ConsensusService.NAME)).willReturn(readableStates); - given(readableStates.get(ConsensusServiceImpl.TOPICS_KEY)) - .willReturn(wellKnownTopicsKVS()); - - final var mappedTopic = MonoGetTopicInfoUsage.monoTopicFrom(MOCK_TOPIC); - given(monoFeeCalculator.getSigUsage(accessor, PAYER_KEY)).willReturn(MOCK_SIG_USAGE); - given(monoUpdateTopicUsage.usageGivenExplicit(MOCK_TXN, MOCK_SIG_USAGE, mappedTopic)) - .willReturn(MOCK_USAGE); - - given(exchange.activeRate(NOW)).willReturn(MOCK_RATE); - 
given(usagePrices.activePrices(accessor)).willReturn(TYPED_PRICES); - given(monoFeeCalculator.feesIncludingCongestion(MOCK_USAGE, MOCK_PRICES, accessor, MOCK_RATE)) - .willReturn(MOCK_FEES); - - final var updateTopicFees = subject.computeFee(accessor, PAYER_KEY, view, NOW); - - assertSame(MOCK_FEES, updateTopicFees); - } - - @Test - void estimatesUpdateTopicFeeViaAccessibleStates() throws InvalidTxBodyException { - given(accessor.getTxn()).willReturn(MOCK_TXN); - given(accessor.getFunction()).willReturn(ConsensusUpdateTopic); - given(accessor.getSubType()).willReturn(SubType.DEFAULT); - - given(stateAccessor.get()).willReturn(wrapper); - given(wrapper.get()).willReturn(state); - given(state.createReadableStates(ConsensusService.NAME)).willReturn(readableStates); - given(readableStates.get(ConsensusServiceImpl.TOPICS_KEY)) - .willReturn(wellKnownTopicsKVS()); - - final var mappedTopic = MonoGetTopicInfoUsage.monoTopicFrom(MOCK_TOPIC); - given(monoFeeCalculator.getSigUsage(accessor, PAYER_KEY)).willReturn(MOCK_SIG_USAGE); - given(monoUpdateTopicUsage.usageGivenExplicit(MOCK_TXN, MOCK_SIG_USAGE, mappedTopic)) - .willReturn(MOCK_USAGE); - - given(exchange.rate(AT)).willReturn(MOCK_RATE); - given(monoFeeCalculator.uncheckedPricesGiven(accessor, AT)).willReturn(TYPED_PRICES); - given(monoFeeCalculator.feesIncludingCongestion(MOCK_USAGE, MOCK_PRICES, accessor, MOCK_RATE)) - .willReturn(MOCK_FEES); - - final var updateTopicFees = subject.estimateFee(accessor, PAYER_KEY, view, AT); - - assertSame(MOCK_FEES, updateTopicFees); - } - - @Test - void delegatesEstimatePayment() { - given(monoFeeCalculator.estimatePayment(MOCK_QUERY, MOCK_PRICES, view, AT, ANSWER_ONLY)) - .willReturn(MOCK_FEES); - - final var estimatePayment = subject.estimatePayment(MOCK_QUERY, MOCK_PRICES, view, AT, ANSWER_ONLY); - - assertSame(MOCK_FEES, estimatePayment); - } - - @Test - void delegatesComputePayment() { - given(monoFeeCalculator.computePayment(MOCK_QUERY, MOCK_PRICES, view, AT, Collections.emptyMap())) - .willReturn(MOCK_FEES); - - final var computedPayment = subject.computePayment(MOCK_QUERY, MOCK_PRICES, view, AT, Collections.emptyMap()); - - assertSame(MOCK_FEES, computedPayment); - } - - @Test - void delegatesAutoRenewalAssessment() { - final var expected = new RenewAssessment(1L, 2L); - given(monoFeeCalculator.assessCryptoAutoRenewal(account, 3L, NOW, account)) - .willReturn(expected); - - final var assessment = subject.assessCryptoAutoRenewal(account, 3L, NOW, account); - - assertSame(expected, assessment); - } - - private MapReadableKVState wellKnownTopicsKVS() { - return MapReadableKVState.builder(ConsensusServiceImpl.TOPICS_KEY) - .value(MOCK_TOPIC.id(), MOCK_TOPIC) - .build(); - } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoProcessLogicTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoProcessLogicTest.java deleted file mode 100644 index aa52205d5d8a..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoProcessLogicTest.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.workflows.handle; - -import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; -import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; -import static com.hedera.node.app.service.mono.context.properties.SemanticVersions.SEMANTIC_VERSIONS; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.verify; - -import com.hedera.hapi.node.base.Key; -import com.hedera.hapi.node.base.Transaction; -import com.hedera.node.app.AppTestBase; -import com.hedera.node.app.service.mono.legacy.core.jproto.JEd25519Key; -import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; -import com.hedera.node.app.service.mono.state.logic.StandardProcessLogic; -import com.hedera.node.app.service.mono.utils.accessors.SwirldsTxnAccessor; -import com.hedera.node.app.service.mono.utils.accessors.TxnAccessor; -import com.hedera.node.app.signature.SignatureVerificationFuture; -import com.hedera.node.app.signature.impl.SignatureVerificationFutureImpl; -import com.hedera.node.app.spi.fixtures.Scenarios; -import com.hedera.node.app.spi.key.HederaKey; -import com.hedera.node.app.workflows.prehandle.PreHandleResult; -import com.hedera.node.app.workflows.prehandle.PreHandleResult.Status; -import com.hedera.pbj.runtime.io.buffer.Bytes; -import com.swirlds.common.crypto.TransactionSignature; -import com.swirlds.common.system.SoftwareVersion; -import com.swirlds.common.system.transaction.internal.ConsensusTransactionImpl; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class AdaptedMonoProcessLogicTest extends AppTestBase implements Scenarios { - - private static final long DEFAULT_CONFIG_VERSION = 1L; - - @Mock - private StandardProcessLogic monoProcessLogic; - - @Mock - private ConsensusTransactionImpl platformTxn; - - @Mock - private TxnAccessor accessor; - - @Mock - private TransactionSignature signature; - - private AdaptedMonoProcessLogic subject; - private final SoftwareVersion eventVersion = SEMANTIC_VERSIONS.deployedSoftwareVersion(); - - @BeforeEach - void setUp() { - subject = new AdaptedMonoProcessLogic(monoProcessLogic); - } - - @Test - void passesThroughNonPreHandleResult() { - given(platformTxn.getMetadata()).willReturn(accessor); - - subject.incorporateConsensusTxn(platformTxn, 1L, eventVersion); - - verify(monoProcessLogic).incorporateConsensusTxn(platformTxn, 1L, eventVersion); - } - - @Test - void adaptsPreHandleResultAsPayerAndOthersIfOK() { - final ArgumentCaptor captor = 
ArgumentCaptor.forClass(SwirldsTxnAccessor.class); - - final var noopTxn = Transaction.newBuilder().build(); - final var meta = new PreHandleResult( - // payer, status, responseCode, txInfo, payerFuture, nonPayerFutures, nonPayerHollowFutures, innerResult - ALICE.accountID(), - PAYER_KEY_PBJ, - Status.SO_FAR_SO_GOOD, - OK, - null, - verificationResults(PAYER_KEY_PBJ), - null, - DEFAULT_CONFIG_VERSION); - - given(platformTxn.getMetadata()).willReturn(meta); - given(platformTxn.getContents()).willReturn(asByteArray(noopTxn)); - - subject.incorporateConsensusTxn(platformTxn, 1L, eventVersion); - - verify(platformTxn).setMetadata(captor.capture()); - final var accessor = captor.getValue(); - assertEquals(com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK, accessor.getExpandedSigStatus()); - assertNotNull(accessor.getLinkedRefs()); - final var sigMeta = accessor.getSigMeta(); - assertTrue(sigMeta.couldRationalizePayer()); - assertTrue(sigMeta.couldRationalizeOthers()); - } - - @Test - void adaptsTransactionPayerOnlyIfNotOK() { - final ArgumentCaptor captor = ArgumentCaptor.forClass(SwirldsTxnAccessor.class); - - final var noopTxn = Transaction.newBuilder().build(); - final var cryptoSigs = List.of(signature); - // TODO OTHER_PARTY_KEYS? cryptoSigs? - final var meta = new PreHandleResult( - ALICE.accountID(), - PAYER_KEY_PBJ, - Status.NODE_DUE_DILIGENCE_FAILURE, - INVALID_ACCOUNT_ID, - null, - verificationResults(PAYER_KEY_PBJ), - null, - DEFAULT_CONFIG_VERSION); - - given(platformTxn.getMetadata()).willReturn(meta); - given(platformTxn.getContents()).willReturn(asByteArray(noopTxn)); - - subject.incorporateConsensusTxn(platformTxn, 1L, eventVersion); - - verify(platformTxn).setMetadata(captor.capture()); - final var accessor = captor.getValue(); - assertEquals( - com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID, - accessor.getExpandedSigStatus()); - assertNotNull(accessor.getLinkedRefs()); - final var sigMeta = accessor.getSigMeta(); - assertTrue(sigMeta.couldRationalizePayer()); - assertFalse(sigMeta.couldRationalizeOthers()); - } - - @Test - void adaptsTransactionNonAvailableIfNullPayerKey() { - final ArgumentCaptor captor = ArgumentCaptor.forClass(SwirldsTxnAccessor.class); - - final var noopTxn = Transaction.newBuilder().build(); - final var meta = new PreHandleResult( - ALICE.accountID(), - null, - Status.NODE_DUE_DILIGENCE_FAILURE, - INVALID_ACCOUNT_ID, - null, - verificationResults(PAYER_KEY_PBJ), - null, - DEFAULT_CONFIG_VERSION); - - given(platformTxn.getMetadata()).willReturn(meta); - given(platformTxn.getContents()).willReturn(asByteArray(noopTxn)); - - subject.incorporateConsensusTxn(platformTxn, 1L, eventVersion); - - verify(platformTxn).setMetadata(captor.capture()); - final var accessor = captor.getValue(); - assertEquals( - com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID, - accessor.getExpandedSigStatus()); - assertNotNull(accessor.getLinkedRefs()); - final var sigMeta = accessor.getSigMeta(); - assertFalse(sigMeta.couldRationalizePayer()); - assertFalse(sigMeta.couldRationalizeOthers()); - } - - @Test - void translatesUnparseableContentsAsISE() { - final ArgumentCaptor captor = ArgumentCaptor.forClass(SwirldsTxnAccessor.class); - - final var nonsenseTxn = Transaction.newBuilder() - .signedTransactionBytes(Bytes.wrap("NONSENSE")) - .build(); - final var meta = new PreHandleResult( - ALICE.accountID(), - null, - Status.NODE_DUE_DILIGENCE_FAILURE, - INVALID_ACCOUNT_ID, - null, - Map.of(), - null, - DEFAULT_CONFIG_VERSION); - - 
given(platformTxn.getMetadata()).willReturn(meta); - given(platformTxn.getContents()).willReturn(asByteArray(nonsenseTxn)); - - assertThrows(IllegalStateException.class, () -> subject.incorporateConsensusTxn(platformTxn, 1L, eventVersion)); - } - - private static final JKey PAYER_KEY = new JEd25519Key("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".getBytes()); - private static final Key PAYER_KEY_PBJ = Key.newBuilder() - .ed25519(Bytes.wrap("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")) - .build(); - private static final Set OTHER_PARTY_KEYS = Set.of( - new JEd25519Key("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb".getBytes()), - new JEd25519Key("cccccccccccccccccccccccccccccccc".getBytes())); - - private static Map verificationResults(Key key) { - final var txSig = new TransactionSignature(new byte[3], 0, 1, 1, 1, 2, 1); - return Map.of(key, new SignatureVerificationFutureImpl(key, null, txSig)); - } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoTransitionRunnerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoTransitionRunnerTest.java deleted file mode 100644 index 92f49f7a5d24..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/AdaptedMonoTransitionRunnerTest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.workflows.handle; - -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class AdaptedMonoTransitionRunnerTest { - // private final TransactionBody mockTxn = TransactionBody.getDefaultInstance(); - // - // @Mock - // private EntityIdSource ids; - // - // @Mock - // private TransactionContext txnCtx; - // - // @Mock - // private TransactionDispatcher dispatcher; - // - // @Mock - // private TransitionLogicLookup lookup; - // - // @Mock - // private GlobalStaticProperties staticProperties; - // - // @Mock - // private TxnAccessor accessor; - // - // @Mock - // private WritableStoreFactory storeFactory; - // - // private AdaptedMonoTransitionRunner subject; - // - // @BeforeEach - // void setUp() { - // given(staticProperties.workflowsEnabled()).willReturn(Set.of(ConsensusCreateTopic)); - // subject = new AdaptedMonoTransitionRunner(ids, txnCtx, dispatcher, lookup, staticProperties, - // storeFactory); - // } - // - // @Test - // void delegatesConsensusCreateAndTracksSuccess() { - // given(accessor.getFunction()).willReturn(ConsensusCreateTopic); - // given(accessor.body()).willReturn(mockTxn); - // - // subject.tryTransition(accessor); - // - // verify(dispatcher).dispatchHandle(ConsensusCreateTopic, mockTxn, storeFactory); - // verify(txnCtx).setStatus(SUCCESS); - // } - // - // @Test - // void delegatesConsensusCreateAndTracksFailureIfThrows() { - // given(accessor.getFunction()).willReturn(ConsensusCreateTopic); - // given(accessor.body()).willReturn(mockTxn); - // willThrow(new HandleStatusException(INVALID_EXPIRATION_TIME)) - // .given(dispatcher) - // .dispatchHandle(ConsensusCreateTopic, mockTxn, storeFactory); - // - // assertTrue(subject.tryTransition(accessor)); - // - // verify(dispatcher).dispatchHandle(ConsensusCreateTopic, mockTxn, storeFactory); - // verify(txnCtx).setStatus(INVALID_EXPIRATION_TIME); - // } - // - // @Test - // void doesNotDelegateOthers() { - // given(accessor.getFunction()).willReturn(CryptoTransfer); - // given(accessor.body()).willReturn(mockTxn); - // given(lookup.lookupFor(CryptoTransfer, mockTxn)).willReturn(Optional.empty()); - // - // assertFalse(subject.tryTransition(accessor)); - // - // verifyNoInteractions(dispatcher); - // verify(txnCtx).setStatus(FAIL_INVALID); - // } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/CompoundSignatureVerificationFutureTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/CompoundSignatureVerificationFutureTest.java similarity index 99% rename from hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/CompoundSignatureVerificationFutureTest.java rename to hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/CompoundSignatureVerificationFutureTest.java index c9cccfda9ae2..79a48e0727a2 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/CompoundSignatureVerificationFutureTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/CompoundSignatureVerificationFutureTest.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.hedera.node.app.workflows.prehandle; +package com.hedera.node.app.workflows.handle; import static java.util.Collections.emptyList; import static java.util.concurrent.CompletableFuture.completedFuture; diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java index 7573df134c13..77e4e4c8fdba 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java @@ -16,6 +16,7 @@ package com.hedera.node.app.workflows.handle; +import static com.hedera.node.app.spi.fixtures.Scenarios.ALICE; import static com.hedera.node.app.spi.fixtures.Scenarios.ERIN; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -44,8 +45,6 @@ import com.hedera.node.app.spi.signatures.SignatureVerification; import com.hedera.node.app.spi.state.ReadableStates; import com.hedera.node.app.spi.state.WritableStates; -import com.hedera.node.app.spi.validation.AttributeValidator; -import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleContext.TransactionCategory; import com.hedera.node.app.spi.workflows.HandleException; @@ -108,6 +107,8 @@ void setup() { private HandleContextImpl createContext(TransactionBody txBody) { return new HandleContextImpl( txBody, + ALICE.accountID(), + ALICE.account().keyOrThrow(), TransactionCategory.USER, recordBuilder, stack, @@ -121,8 +122,12 @@ private HandleContextImpl createContext(TransactionBody txBody) { @SuppressWarnings("ConstantConditions") @Test void testConstructorWithInvalidArguments() { + final var payer = ALICE.accountID(); + final var payerKey = ALICE.account().keyOrThrow(); assertThatThrownBy(() -> new HandleContextImpl( null, + payer, + payerKey, TransactionCategory.USER, recordBuilder, stack, @@ -135,6 +140,34 @@ void testConstructorWithInvalidArguments() { assertThatThrownBy(() -> new HandleContextImpl( TransactionBody.DEFAULT, null, + payerKey, + TransactionCategory.USER, + recordBuilder, + stack, + verifier, + recordListBuilder, + checker, + dispatcher, + serviceScopeLookup)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new HandleContextImpl( + TransactionBody.DEFAULT, + payer, + null, + TransactionCategory.USER, + recordBuilder, + stack, + verifier, + recordListBuilder, + checker, + dispatcher, + serviceScopeLookup)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new HandleContextImpl( + TransactionBody.DEFAULT, + payer, + payerKey, + null, recordBuilder, stack, verifier, @@ -145,6 +178,8 @@ void testConstructorWithInvalidArguments() { .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleContextImpl( TransactionBody.DEFAULT, + payer, + payerKey, TransactionCategory.USER, null, stack, @@ -156,6 +191,8 @@ void testConstructorWithInvalidArguments() { .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleContextImpl( TransactionBody.DEFAULT, + payer, + payerKey, TransactionCategory.USER, recordBuilder, null, @@ -167,6 +204,8 @@ void testConstructorWithInvalidArguments() { .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleContextImpl( 
TransactionBody.DEFAULT, + payer, + payerKey, TransactionCategory.USER, recordBuilder, stack, @@ -178,6 +217,8 @@ void testConstructorWithInvalidArguments() { .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleContextImpl( TransactionBody.DEFAULT, + payer, + payerKey, TransactionCategory.USER, recordBuilder, stack, @@ -189,6 +230,8 @@ void testConstructorWithInvalidArguments() { .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleContextImpl( TransactionBody.DEFAULT, + payer, + payerKey, TransactionCategory.USER, recordBuilder, stack, @@ -200,6 +243,8 @@ void testConstructorWithInvalidArguments() { .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleContextImpl( TransactionBody.DEFAULT, + payer, + payerKey, TransactionCategory.USER, recordBuilder, stack, @@ -211,6 +256,8 @@ void testConstructorWithInvalidArguments() { .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleContextImpl( TransactionBody.DEFAULT, + payer, + payerKey, TransactionCategory.USER, recordBuilder, stack, @@ -299,43 +346,6 @@ void testNewEntityNum() { assertThat(actual2).isSameAs(2L); } - @Test - void testAccessAttributeValidator( - @Mock AttributeValidator attributeValidator1, @Mock AttributeValidator attributeValidator2) { - // given - when(savepoint1.attributeValidator()).thenReturn(attributeValidator1); - when(savepoint2.attributeValidator()).thenReturn(attributeValidator2); - when(stack.peek()).thenReturn(savepoint1); - final var context = createContext(TransactionBody.DEFAULT); - - // when - final var actual1 = context.attributeValidator(); - when(stack.peek()).thenReturn(savepoint2); - final var actual2 = context.attributeValidator(); - - // then - assertThat(actual1).isSameAs(attributeValidator1); - assertThat(actual2).isSameAs(attributeValidator2); - } - - @Test - void testAccessExpiryValidator(@Mock ExpiryValidator expiryValidator1, @Mock ExpiryValidator expiryValidator2) { - // given - when(savepoint1.expiryValidator()).thenReturn(expiryValidator1); - when(savepoint2.expiryValidator()).thenReturn(expiryValidator2); - when(stack.peek()).thenReturn(savepoint1); - final var context = createContext(TransactionBody.DEFAULT); - - // when - final var actual1 = context.expiryValidator(); - when(stack.peek()).thenReturn(savepoint2); - final var actual2 = context.expiryValidator(); - - // then - assertThat(actual1).isSameAs(expiryValidator1); - assertThat(actual2).isSameAs(expiryValidator2); - } - @Test void testCreateReadableStore(@Mock ReadableStates readableStates) { // given @@ -543,6 +553,8 @@ void setup() { private HandleContextImpl createContext(TransactionBody txBody, TransactionCategory category) { return new HandleContextImpl( txBody, + ALICE.accountID(), + ALICE.account().keyOrThrow(), category, recordBuilder, stack, @@ -657,7 +669,7 @@ void testDispatchPreHandleFails(Consumer contextDispatcher) throw @ParameterizedTest @MethodSource("createContextDispatchers") - void testDispatchHandleFails(Consumer contextDispatcher) throws PreCheckException { + void testDispatchHandleFails(Consumer contextDispatcher) { // given final var txBody = TransactionBody.newBuilder().build(); doThrow(new HandleException(ResponseCodeEnum.ACCOUNT_DOES_NOT_OWN_WIPED_NFT)) diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextVerifierTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextVerifierTest.java index 9c877f54b27b..f9b0bd01cdb8 100644 
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextVerifierTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextVerifierTest.java @@ -20,99 +20,1114 @@ import static com.hedera.node.app.spi.fixtures.Scenarios.BOB; import static com.hedera.node.app.spi.fixtures.Scenarios.CAROL; import static com.hedera.node.app.spi.fixtures.Scenarios.ERIN; +import static com.hedera.node.app.spi.fixtures.Scenarios.FAKE_ECDSA_KEY_INFOS; +import static com.hedera.node.app.spi.fixtures.Scenarios.FAKE_ED25519_KEY_INFOS; +import static com.hedera.node.app.workflows.prehandle.FakeSignatureVerificationFuture.badFuture; +import static com.hedera.node.app.workflows.prehandle.FakeSignatureVerificationFuture.goodFuture; +import static java.util.Collections.emptyMap; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Named.named; +import static org.junit.jupiter.params.provider.Arguments.of; +import static org.mockito.Mockito.mock; +import com.google.common.collect.Streams; import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.KeyList; +import com.hedera.hapi.node.base.ThresholdKey; +import com.hedera.node.app.signature.SignatureVerificationFuture; import com.hedera.node.app.signature.impl.SignatureVerificationImpl; import com.hedera.node.app.spi.signatures.SignatureVerification; +import com.hedera.node.app.workflows.prehandle.FakeSignatureVerificationFuture; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; +import java.util.Map.Entry; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.junit.jupiter.MockitoExtension; class HandleContextVerifierTest { - private static final SignatureVerificationImpl ALICE_VERIFICATION = - new SignatureVerificationImpl(ALICE.keyInfo().publicKey(), null, true); - private static final SignatureVerificationImpl BOB_VERIFICATION = - new SignatureVerificationImpl(BOB.keyInfo().publicKey(), null, false); - private static final SignatureVerificationImpl ERIN_VERIFICATION = new SignatureVerificationImpl( - ERIN.keyInfo().publicKey(), ERIN.account().alias(), false); - - private static final Map VERIFICATIONS = Map.of( - ALICE.keyInfo().publicKey(), ALICE_VERIFICATION, - BOB.keyInfo().publicKey(), BOB_VERIFICATION, - ERIN.keyInfo().publicKey(), ERIN_VERIFICATION); + private static final HederaConfig HEDERA_CONFIG = + HederaTestConfigBuilder.createConfig().getConfigData(HederaConfig.class); @SuppressWarnings("ConstantConditions") @Test void testMethodsWithInvalidArguments() { // given - final var verifier = new HandleContextVerifier(VERIFICATIONS); + final var keyVerifications = Map.of(); + final var verifier = 
createVerifier(keyVerifications); // then - assertThatThrownBy(() -> new HandleContextVerifier(null)).isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new HandleContextVerifier(null, keyVerifications)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new HandleContextVerifier(HEDERA_CONFIG, null)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> verifier.verificationFor((Key) null)).isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> verifier.verificationFor((Bytes) null)).isInstanceOf(NullPointerException.class); } - @Test - void testVerificationForExistingKey() { - // given - final var verifier = new HandleContextVerifier(VERIFICATIONS); + /** + * Tests to verify that finding a {@link SignatureVerification} for cryptographic keys (ED25519, ECDSA_SECP256K1) + * work as expected. No key lists or threshold keys involved. + */ + @Nested + @DisplayName("Finding SignatureVerification With Cryptographic Keys") + @ExtendWith(MockitoExtension.class) + final class FindingSignatureVerificationWithCryptoKeyTests { + @ParameterizedTest + @MethodSource("provideCompoundKeys") + @DisplayName("If there are no verification results, then the result is failed") + void noVerificationResults(@NonNull final Key key) { + final var result = createVerifier(Map.of()); + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } - // when - final var verification = verifier.verificationFor(ALICE.keyInfo().publicKey()); + @Test + @DisplayName("If the key is a cryptographic key in the results then it is returned") + void cryptoKeyIsPresent() { + final var aliceKey = ALICE.keyInfo().publicKey(); // ECDSA + final var aliceVerification = mock(SignatureVerification.class); + final var aliceFuture = new FakeSignatureVerificationFuture(aliceVerification); + final var bobKey = BOB.keyInfo().publicKey(); // ED25519 + final var bobVerification = mock(SignatureVerification.class); + final var bobFuture = new FakeSignatureVerificationFuture(bobVerification); + final var verificationResults = + Map.of(aliceKey, aliceFuture, bobKey, bobFuture); + final var result = createVerifier(verificationResults); - // then - assertThat(verification).isEqualTo(ALICE_VERIFICATION); + assertThat(result.verificationFor(aliceKey)).isSameAs(aliceVerification); + assertThat(result.verificationFor(bobKey)).isSameAs(bobVerification); + } + + @Test + @DisplayName("If the key is a cryptographic key not in the results then null returned") + void cryptoKeyIsMissing() { + final var aliceKey = ALICE.keyInfo().publicKey(); // ECDSA + final var aliceVerification = mock(SignatureVerification.class); + final var aliceFuture = new FakeSignatureVerificationFuture(aliceVerification); + final var bobKey = BOB.keyInfo().publicKey(); // ED25519 + final var bobVerification = mock(SignatureVerification.class); + final var bobFuture = new FakeSignatureVerificationFuture(bobVerification); + final var verificationResults = + Map.of(aliceKey, aliceFuture, bobKey, bobFuture); + final var result = createVerifier(verificationResults); + + // ERIN is another ECDSA key, but one that is not in the verification results + assertThat(result.verificationFor(ERIN.keyInfo().publicKey())) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } + + /** A provider that supplies basic cryptographic keys */ + static Stream provideCompoundKeys() { + // FUTURE: Add RSA keys to this list + return Stream.of( + Arguments.of(named("ED25519", 
FAKE_ED25519_KEY_INFOS[0].publicKey())), + Arguments.of(named("ECDSA_SECP256K1", FAKE_ECDSA_KEY_INFOS[0].publicKey()))); + } } - @Test - void testVerificationForNonExistingKey() { - // given - final var verifier = new HandleContextVerifier(VERIFICATIONS); + /** + * Tests to verify that finding a {@link SignatureVerification} for compound keys (threshold keys, key lists) that + * also have duplicated keys. The point of these tests is really to verify that duplicate keys are counted multiple + * times as expected when meeting threshold requirements. + * + *

+ * <p>We try testing all the boundary conditions:
+ * <ul>
+ *     <li>All signatures are valid</li>
+ *     <li>Just enough signatures are provided and all are valid</li>
+ *     <li>More than enough signatures are provided but only just enough are valid</li>
+ *     <li>More than enough signatures are provided and more than enough are valid</li>
+ *     <li>More than enough signatures are provided but not enough are valid</li>
+ *     <li>Not enough signatures are provided, even though all are valid</li>
+ * </ul>
+ *
+ * <p>
And for those testing "more than needed" and "less than needed", we try to get right on the boundary condition + * as well as all the other permutations. + */ + @Nested + @DisplayName("Finding SignatureVerification With Complex Keys with Duplicates") + @ExtendWith(MockitoExtension.class) + final class FindingSignatureVerificationWithDuplicateKeysTests { + // Used once in the key list + private static final Key ECDSA_X1 = FAKE_ECDSA_KEY_INFOS[1].publicKey(); + // Used twice in the key list + private static final Key ECDSA_X2 = FAKE_ECDSA_KEY_INFOS[2].publicKey(); + // Used once in the key list + private static final Key ED25519_X1 = FAKE_ED25519_KEY_INFOS[1].publicKey(); + // Used twice in the key list + private static final Key ED25519_X2 = FAKE_ED25519_KEY_INFOS[2].publicKey(); - // when - final var verification = verifier.verificationFor(Key.DEFAULT); + private Map verificationResults(Map keysAndPassFail) { + final var results = new HashMap(); + for (final var entry : keysAndPassFail.entrySet()) { + results.put( + entry.getKey(), + new FakeSignatureVerificationFuture( + new SignatureVerificationImpl(entry.getKey(), null, entry.getValue()))); + } + return results; + } - // then - assertThat(verification).isNull(); + @Test + @DisplayName("All signatures are valid for the KeyList") + void allValidInKeyList() { + // Given a KeyList with 6 different keys with 2 duplicates (4 unique keys) and + // verification results for ALL 4 different keys that are PASSING + final var keyList = KeyList.newBuilder() + .keys(ECDSA_X2, ECDSA_X2, ECDSA_X1, ED25519_X2, ED25519_X2, ED25519_X1) + .build(); + var key = Key.newBuilder().keyList(keyList).build(); + var verificationResults = verificationResults(Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X1, true, + ED25519_X2, true)); + // When we pre handle + var result = createVerifier(verificationResults); + // Then we find the verification results are passing because we have all keys signed + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + /** + * If there are just enough signatures to meet the threshold and all are valid signatures, then the overall + * verification will pass. 
+ */ + @ParameterizedTest + @MethodSource("provideJustEnoughSignaturesAndAllAreValid") + @DisplayName("Just enough signatures and all are valid") + void justEnoughAndAllAreValid(@NonNull final Map keysAndPassFail) { + // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and + // verification results for only 2 keys (1 that is a duplicate, one that is not), so that the threshold is + // met + final var keyList = KeyList.newBuilder() + .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) + .build(); + final var thresholdKey = + ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); + final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); + final var verificationResults = verificationResults(keysAndPassFail); + // When we pre handle + final var result = createVerifier(verificationResults); + // Then we find the verification results are passing because we have met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + static Stream provideJustEnoughSignaturesAndAllAreValid() { + return Stream.of( + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true))), + Arguments.of(named( + "ECDSA_X2=pass, ED25519_X1=pass", + Map.of( + ECDSA_X2, true, + ED25519_X1, true))), + Arguments.of(named( + "ECDSA_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ED25519_X2, true))), + Arguments.of(named( + "ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ED25519_X1, true, + ED25519_X2, true)))); + } + + /** + * If there are more than enough signatures, but only *just barely* enough signatures are valid that the + * threshold is met, then the verification will still pass. 
+ */ + @ParameterizedTest + @MethodSource("provideMoreThanEnoughAndJustEnoughValid") + @DisplayName("More than enough signatures but only a sufficient number are valid") + void moreThanEnoughAndJustEnoughValid(@NonNull final Map keysAndPassFail) { + // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and + // verification results for 3 keys (1 that is a duplicate, two that are not), but only 2 of the three are + // passing (where one of them is the duplicate), so that the threshold is met + final var keyList = KeyList.newBuilder() + .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) + .build(); + final var thresholdKey = + ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); + final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); + final var verificationResults = verificationResults(keysAndPassFail); + // When we pre handle + final var result = createVerifier(verificationResults); + // Then we find the verification results are passing because we have met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + static Stream provideMoreThanEnoughAndJustEnoughValid() { + return Stream.of( + // Every key answers, but just enough are valid to pass + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X1, false, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, false, + ECDSA_X2, true, + ED25519_X1, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X1, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, false, + ECDSA_X2, false, + ED25519_X1, true, + ED25519_X2, true))), + // Some keys don't answer, but just enough are valid to pass + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X1, false))), + Arguments.of(named( + "ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X2, true, + ED25519_X1, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=pass, ED25519_X1=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ED25519_X1, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=fail, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, false, + ED25519_X1, true, + ED25519_X2, true))), + // Some other keys don't answer, but just enough are valid to pass + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=pass", + Map.of( + ECDSA_X1, false, + ECDSA_X2, true, + ED25519_X1, true))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X2, false, + ED25519_X1, true, + ED25519_X2, true)))); + } + + /** + * More than enough signatures were provided, and more than were needed actually passed. The overall + * verification therefore also passes. 
+ */ + @ParameterizedTest + @MethodSource("provideMoreThanEnoughAndMoreThanNeededAreValid") + @DisplayName("More than enough signatures and more than enough are valid") + void moreThanEnoughAndMoreThanNeededAreValid(@NonNull final Map keysAndPassFail) { + // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and + // verification results for 3 keys (1 that is a duplicate, two that are not), and all three are passing, + // so that the threshold is met, plus more! + final var keyList = KeyList.newBuilder() + .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) + .build(); + final var thresholdKey = + ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); + final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); + final var verificationResults = verificationResults(keysAndPassFail); + // When we pre handle + final var result = createVerifier(verificationResults); + // Then we find the verification results are passing because we have met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + static Stream provideMoreThanEnoughAndMoreThanNeededAreValid() { + return Stream.of( + // Every key answers, and all are valid + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X1, true, + ED25519_X2, true))), + + // Every key answers, one or more is invalid, but still more than we need + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X1, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X1, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, false, + ECDSA_X2, true, + ED25519_X1, true, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, false, + ECDSA_X2, true, + ED25519_X1, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X1, true, + ED25519_X2, true))), + + // Some keys don't answer, but all are valid (more than enough) + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X1, true))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, true, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X2, true, + ED25519_X1, true, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X2=pass, ED25519_X2=pass", + Map.of( + ECDSA_X2, true, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=pass, ED25519_X1=pass, ED25519_X2=pass", + Map.of( + ECDSA_X1, true, + ED25519_X1, true, + ED25519_X2, true)))); + } + + /** + * In this test there are more than enough keys in the signature ot meet the threshold, if they all passed. + * But it turns out, that enough of them did NOT pass, that the threshold is not met, and the overall + * verification is therefore failed. 
+ */ + @ParameterizedTest + @MethodSource("provideMoreThanEnoughButNotEnoughValid") + @DisplayName("More than enough signatures but not enough are valid") + void moreThanEnoughButNotEnoughValid(@NonNull final Map keysAndPassFail) { + // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and + // verification results for 3 keys (1 that is a duplicate, two that are not), and only the two non-duplicate + // keys are passing, so the threshold is NOT met. + final var keyList = KeyList.newBuilder() + .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) + .build(); + final var thresholdKey = + ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); + final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); + final var verificationResults = verificationResults(keysAndPassFail); + // When we pre handle + final var result = createVerifier(verificationResults); + // Then we find the verification results are NOT passing because we have NOT met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } + + static Stream provideMoreThanEnoughButNotEnoughValid() { + return Stream.of( + // Every key answers, but not enough are valid + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X1, false, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=fail", + Map.of( + ECDSA_X1, false, + ECDSA_X2, true, + ED25519_X1, false, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, false, + ECDSA_X2, false, + ED25519_X1, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, false, + ECDSA_X2, false, + ED25519_X1, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X1, true, + ED25519_X2, false))), + + // Some keys don't answer, and those that do don't cross the threshold + Arguments.of(named( + "ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=fail", + Map.of( + ECDSA_X2, true, + ED25519_X1, false, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=pass, ED25519_X1=fail, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ED25519_X1, false, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ED25519_X1=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, false, + ED25519_X1, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=fail, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, false, + ED25519_X1, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=pass, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ED25519_X1, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=fail", + Map.of( + ECDSA_X1, false, + ECDSA_X2, true, + ED25519_X1, false))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X1, false))), + Arguments.of(named( + "ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=pass", + Map.of( + ECDSA_X2, false, + ED25519_X1, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=fail", + Map.of( + ECDSA_X2, 
false, + ED25519_X1, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=pass", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X1, true))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X2=fail", + Map.of( + ECDSA_X1, false, + ECDSA_X2, true, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X2=fail", + Map.of( + ECDSA_X1, true, + ECDSA_X2, false, + ED25519_X2, false))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X2=pass", + Map.of( + ECDSA_X1, false, + ECDSA_X2, false, + ED25519_X2, true))), + Arguments.of(named( + "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=pass", + Map.of( + ECDSA_X1, false, + ECDSA_X2, false, + ED25519_X1, true)))); + } + + /** + * In this test, every signature is valid, but there just are not enough signatures to meet the threshold, + * so the overall verification must fail. + */ + @ParameterizedTest + @MethodSource("provideNotEnoughSignatures") + @DisplayName("Not enough signatures but all are valid") + void notEnoughSignatures(@NonNull final Map keysAndPassFail) { + // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and + // there are only verification results for 1 key, which isn't enough to meet the threshold. + final var keyList = KeyList.newBuilder() + .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) + .build(); + final var thresholdKey = + ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); + final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); + final var verificationResults = verificationResults(keysAndPassFail); + // When we pre handle + final var result = createVerifier(verificationResults); + // Then we find the verification results are passing because we have met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } + + static Stream provideNotEnoughSignatures() { + return Stream.of( + // Every key answers, but not enough are valid + Arguments.of(named("ECDSA_X1=pass", Map.of(ECDSA_X1, true))), // 1 of 3 + Arguments.of(named("ECDSA_X2=pass", Map.of(ECDSA_X2, true))), // 2 of 3 + Arguments.of(named("ED25519_X1=pass", Map.of(ED25519_X1, true))), // 1 of 3 + Arguments.of(named("ED25519_X2=pass", Map.of(ED25519_X2, true))), // 2 of 3 + Arguments.of(named( + "ECDSA_X1=pass, ED25519_X1=pass", Map.of(ECDSA_X1, true, ED25519_X1, true)))); // 2 of 3 + } } - @Test - void testVerificationForExistingAlias() { - // given - final var verifier = new HandleContextVerifier(VERIFICATIONS); + /** + * Various targeted tests for {@link ThresholdKey} and {@link KeyList} lookup. 
+ */ + @Nested + @DisplayName("Finding SignatureVerification With Threshold and KeyList Keys") + @ExtendWith(MockitoExtension.class) + final class FindingSignatureVerificationWithCompoundKeyTests { - // when - final var verification = verifier.verificationFor(ERIN.account().alias()); + // A ThresholdKey with a threshold greater than max keys acts like a KeyList - // then - assertThat(verification).isEqualTo(ERIN_VERIFICATION); + @Test + @DisplayName("An empty KeyList never validates") + void emptyKeyList() { + // Given a KeyList with no keys + final var keyList = KeyList.newBuilder().build(); + final var key = Key.newBuilder().keyList(keyList).build(); + // When we pre handle + final var result = createVerifier(emptyMap()); + // Then we find the verification results will fail + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } + + @ParameterizedTest + @ValueSource(ints = {-1, 0}) + @DisplayName("A threshold of less than 1 is clamped to 1") + void thresholdLessThanOne(final int threshold) { + // Given a ThresholdKey with a threshold less than 1 + final var thresholdKey = ThresholdKey.newBuilder() + .threshold(threshold) + .keys(KeyList.newBuilder() + .keys(FAKE_ECDSA_KEY_INFOS[0].publicKey(), FAKE_ED25519_KEY_INFOS[0].publicKey())) + .build(); + final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); + + // First, verify that if there are NO valid verification results the threshold verification fails + Map verificationResults = + Map.of(FAKE_ECDSA_KEY_INFOS[1].publicKey(), goodFuture(FAKE_ECDSA_KEY_INFOS[1].publicKey())); + var result = createVerifier(verificationResults); + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + + // Now verify that if we verify with one valid verification result, the threshold verification passes + verificationResults = + Map.of(FAKE_ECDSA_KEY_INFOS[0].publicKey(), goodFuture(FAKE_ECDSA_KEY_INFOS[0].publicKey())); + // When we pre handle + result = createVerifier(verificationResults); + // Then we find the verification results will pass if we have at least 1 valid signature + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + @Test + @DisplayName("A threshold greater than the number of keys is clamped to the number of keys") + void thresholdGreaterThanNumKeys() { + // Given a ThresholdKey with a threshold greater than the number of keys + final var thresholdKey = ThresholdKey.newBuilder() + .threshold(3) + .keys(KeyList.newBuilder() + .keys(FAKE_ECDSA_KEY_INFOS[0].publicKey(), FAKE_ED25519_KEY_INFOS[0].publicKey())) + .build(); + final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); + final Map verificationResults = Map.of( + FAKE_ECDSA_KEY_INFOS[0].publicKey(), goodFuture(FAKE_ECDSA_KEY_INFOS[0].publicKey()), + FAKE_ED25519_KEY_INFOS[0].publicKey(), goodFuture(FAKE_ED25519_KEY_INFOS[0].publicKey())); + + // When we pre handle + var result = createVerifier(verificationResults); + + // Then we find the verification results will pass + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + /** + * If there are no verification results at all, then no matter what key we throw at it, we should get back + * a failed verification. 
+ */ + @ParameterizedTest + @MethodSource("provideCompoundKeys") + @DisplayName("A ThresholdKey or KeyList with no verification results returns a failed SignatureVerification") + void keyWithNoVerificationResults(@NonNull final Key key) { + final var result = createVerifier(emptyMap()); + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } + + /** + * If there are just enough signatures to meet the threshold and all are valid signatures, then the overall + * verification will pass. + */ + @ParameterizedTest + @MethodSource("provideCompoundKeys") + @DisplayName("Just enough signatures and all are valid") + void justEnoughAndAllAreValid(@NonNull final Key key) { + // Given a barely sufficient number of signatures, all of which are valid + final var verificationResults = allVerifications(key); + removeVerificationsFrom(key, verificationResults, false); + + // When we pre handle + final var result = createVerifier(verificationResults); + + // Then we find the verification results are passing because we have met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + /** + * If there are more than enough signatures, but only *just barely* enough of them are valid to meet the + * threshold, then the verification will still pass. + */ + @ParameterizedTest + @MethodSource("provideCompoundKeys") + @DisplayName("More than enough signatures but only a sufficient number are valid") + void moreThanEnoughAndJustEnoughValid(@NonNull final Key key) { + // Given more than enough validations but just barely enough of them are valid + final var verificationResults = allVerifications(key); + failVerificationsIn(key, verificationResults, false); + + // When we pre handle + final var result = createVerifier(verificationResults); + + // Then we find the verification results are passing because we have met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + /** + * More than enough signatures were provided, and more than were needed actually passed. The overall + * verification therefore also passes. + */ + @ParameterizedTest + @MethodSource("provideCompoundKeys") + @DisplayName("More than enough signatures and more than enough are valid") + void moreThanEnoughAndMoreThanNeededAreValid(@NonNull final Key key) { + // Given more than enough validations, all of which are valid + final Map<Key, SignatureVerificationFuture> verificationResults = allVerifications(key); + + // When we pre handle + final var result = createVerifier(verificationResults); + + // Then we find the verification results are passing because we have met the minimum threshold + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(true); + } + + /** + * In this test there are more than enough keys in the signature to meet the threshold, if they all passed. + * But it turns out that enough of them did NOT pass that the threshold is not met, and the overall + * verification therefore fails.
+ */ + @ParameterizedTest + @MethodSource("provideCompoundKeys") + @DisplayName("More than enough signatures but not enough are valid") + void moreThanEnoughButNotEnoughValid(@NonNull final Key key) { + // Given more than enough validations but not enough of them are valid + final var verificationResults = allVerifications(key); + failVerificationsIn(key, verificationResults, true); + + // When we pre handle + final var result = createVerifier(verificationResults); + + // Then we find the verification fails because the minimum threshold has not been met + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } + + /** + * In this test, every signature is valid, but there just are not enough signatures to meet the threshold, + * so the overall verification must fail. + */ + @ParameterizedTest + @MethodSource("provideCompoundKeys") + @DisplayName("Not enough signatures but all are valid") + void notEnoughSignatures(@NonNull final Key key) { + // Given not enough signatures + final var verificationResults = allVerifications(key); + removeVerificationsFrom(key, verificationResults, true); + + // When we pre handle + final var result = createVerifier(verificationResults); + + // Then we find the verification fails because the minimum threshold has not been met + assertThat(result.verificationFor(key)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } + + /** A provider that supplies a mixture of KeyLists and ThresholdKeys, all of which are good keys. */ + static Stream<Arguments> provideCompoundKeys() { + // FUTURE: Add RSA keys to this list + return Streams.concat(provideKeyLists(), provideThresholdKeys()); + } + + /** + * Provides a comprehensive set of KeyLists, including nested KeyLists and ThresholdKeys. At most, we + * return a KeyList with a depth of 3 and with up to 4 elements, one for each type of key that we support. This + * provider does not create duplicates; those scenarios are tested separately. + */ + static Stream<Arguments> provideKeyLists() { + return keyListPermutations().entrySet().stream() + .map(entry -> of(named( + "KeyList(" + entry.getKey() + ")", + Key.newBuilder().keyList(entry.getValue()).build()))); + } + + /** + * A provider specifically for all permutations of a valid threshold key, including those with duplicate keys + * and nesting. + */ + static Stream<Arguments> provideThresholdKeys() { + return keyListPermutations().entrySet().stream().map(entry -> { + final var keys = entry.getValue().keysOrThrow(); + final var threshold = Math.max(1, keys.size() / 2); + final var thresholdKey = Key.newBuilder() + .thresholdKey(ThresholdKey.newBuilder() + .threshold(threshold) + .keys(KeyList.newBuilder().keys(keys))) + .build(); + return of(named("ThresholdKey(" + threshold + ", " + entry.getKey() + ")", thresholdKey)); + }); + } + + /** Generates the set of test permutations shared between KeyLists and ThresholdKeys.
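+ * A loop index of -1 below means that element is absent, so key lists of one to four elements are produced.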
*/ + private static Map<String, KeyList> keyListPermutations() { + final var map = new LinkedHashMap<String, KeyList>(); + // FUTURE: Add RSA keys to this list + final List<Function<Integer, Map.Entry<String, Key>>> creators = List.of( + (i) -> Map.entry("ED25519", FAKE_ED25519_KEY_INFOS[i].publicKey()), + (i) -> Map.entry("ECDSA_SECP256K1", FAKE_ECDSA_KEY_INFOS[i].publicKey()), + (i) -> Map.entry( + "KeyList(ECDSA_SECP256K1, ED25519)", + keyList(FAKE_ECDSA_KEY_INFOS[i].publicKey(), FAKE_ED25519_KEY_INFOS[i].publicKey())), + (i) -> Map.entry( + "ThresholdKey(1, ED25519, ECDSA_SECP256K1)", + thresholdKey( + 1, FAKE_ED25519_KEY_INFOS[i].publicKey(), FAKE_ECDSA_KEY_INFOS[i].publicKey()))); + + // Compute every permutation of 1, 2, 3, and 4 elements. + for (int i = -1; i < 4; i++) { + for (int j = -1; j < 4; j++) { + for (int k = -1; k < 4; k++) { + for (int el = 0; el < 4; el++) { + int keyIndex = 0; + final var names = new ArrayList<String>(); + final var keys = new ArrayList<Key>(); + if (i >= 0) { + final var entry = creators.get(i).apply(keyIndex++); + final var name = entry.getKey(); + final var key = entry.getValue(); + names.add(name); + keys.add(key); + } + if (j >= 0) { + final var entry = creators.get(j).apply(keyIndex++); + final var name = entry.getKey(); + final var key = entry.getValue(); + names.add(name); + keys.add(key); + } + if (k >= 0) { + final var entry = creators.get(k).apply(keyIndex++); + final var name = entry.getKey(); + final var key = entry.getValue(); + names.add(name); + keys.add(key); + } + final var entry = creators.get(el).apply(keyIndex); + final var name = entry.getKey(); + final var key = entry.getValue(); + names.add(name); + keys.add(key); + + final var keyList = KeyList.newBuilder().keys(keys).build(); + map.put(String.join(", ", names), keyList); + } + } + } + } + return map; + } + + /** Provides all {@link SignatureVerificationFuture}s for every cryptographic key in the {@link Key}. */ + private static Map<Key, SignatureVerificationFuture> allVerifications(@NonNull final Key key) { + return switch (key.key().kind()) { + case KEY_LIST -> allVerifications(key.keyListOrThrow()); + case THRESHOLD_KEY -> allVerifications(key.thresholdKeyOrThrow().keysOrThrow()); + case ED25519, ECDSA_SECP256K1 -> new HashMap<>(Map.of(key, goodFuture(key))); // make mutable + default -> throw new IllegalArgumentException( + "Unsupported key type: " + key.key().kind()); + }; + } + + /** Creates a {@link SignatureVerificationFuture} for each key in the key list */ + private static Map<Key, SignatureVerificationFuture> allVerifications(@NonNull final KeyList key) { + return key.keysOrThrow().stream() + .map(FindingSignatureVerificationWithCompoundKeyTests::allVerifications) + .flatMap(map -> map.entrySet().stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + /** + * Removes some number of {@link SignatureVerificationFuture}s from the map such that either there are only + * just barely enough remaining to pass any threshold checks (if {@code removeTooMany} is {@code false}), or too + * many of them such that there are not enough for threshold checks to pass (if {@code removeTooMany} is + * {@code true}). + */ + private static void removeVerificationsFrom( + @NonNull final Key key, + @NonNull final Map<Key, SignatureVerificationFuture> map, + final boolean removeTooMany) { + + switch (key.key().kind()) { + case KEY_LIST -> { + // A KeyList cannot have ANY removed and still pass. So we only remove a single key's worth of + // verifications if we are removing too many.
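+ // If we are only trimming down to the bare minimum, nothing is removed here: a KeyList needs every key's verification in order to pass.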
+ if (removeTooMany) { + final var subKeys = key.keyListOrThrow().keysOrThrow(); + final var subKey = subKeys.get(0); + removeVerificationsFrom(subKey, map, true); + } + } + case THRESHOLD_KEY -> { + // We remove verifications associated with keys. If we are removing too many, we remove one more + // than is supported by the threshold. Otherwise, we just remove down to the threshold + final var threshold = key.thresholdKeyOrThrow().threshold(); + final var subKeys = key.thresholdKeyOrThrow().keysOrThrow().keysOrThrow(); + final var numToRemove = subKeys.size() - threshold + (removeTooMany ? 1 : 0); + for (int i = 0; i < numToRemove; i++) { + final var subKey = subKeys.get(i); + removeVerificationsFrom(subKey, map, removeTooMany); + } + } + case ED25519, ECDSA_SECP256K1 -> { + if (removeTooMany) { + map.remove(key); + } + } + default -> throw new IllegalArgumentException( + "Unsupported key type: " + key.key().kind()); + } + } + + /** Similar to the above, except we fail verifications instead of removing them. */ + private static void failVerificationsIn( + @NonNull final Key key, @NonNull Map map, boolean failTooMany) { + switch (key.key().kind()) { + case KEY_LIST -> { + // A Key list cannot have ANY failed and still pass. So we only fail a single key's worth of + // verifications if we are failing too many. + if (failTooMany) { + final var subKeys = key.keyListOrThrow().keysOrThrow(); + final var subKey = subKeys.get(0); + failVerificationsIn(subKey, map, true); + } + } + case THRESHOLD_KEY -> { + // We fail verifications associated with keys. If we are failing too many, we fail one more + // than is supported by the threshold. Otherwise, we just fail down to the threshold + final var threshold = key.thresholdKeyOrThrow().threshold(); + final var subKeys = key.thresholdKeyOrThrow().keysOrThrow().keysOrThrow(); + final var numToFail = subKeys.size() - threshold + (failTooMany ? 1 : 0); + for (int i = 0; i < numToFail; i++) { + final var subKey = subKeys.get(i); + failVerificationsIn(subKey, map, failTooMany); + } + } + case ED25519, ECDSA_SECP256K1 -> { + if (failTooMany) { + map.put(key, badFuture(key)); + } + } + default -> throw new IllegalArgumentException( + "Unsupported key type: " + key.key().kind()); + } + } } - @Test - void testVerificationForShortenedAlias() { - // given - final var verifier = new HandleContextVerifier(VERIFICATIONS); + @Nested + @DisplayName("Hollow Account based Verification") + final class HollowAccountBasedTest { + /** As with key verification, with hollow account verification, an empty list of signatures should fail. 
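+ * Verification by alias can only succeed when some verification result is bound to the hollow account itself.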
*/ + @Test + @DisplayName("Cannot verify hollow account when the signature list is empty") + void failToVerifyIfSignaturesAreEmpty() { + // Given a hollow account and no verification results + final var alias = ERIN.account().alias(); + // When we pre-handle the transaction + final var result = createVerifier(emptyMap()); + // Then we find the verification result is failed + assertThat(result.verificationFor(alias)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } - // when - final var verification = verifier.verificationFor(ERIN.account().alias().getBytes(0, 19)); + /** If there are verifications but none for this hollow account, then the verification fails */ + @Test + @DisplayName("Cannot verify hollow account if it is not in the verification results") + void failToVerifyIfHollowAccountIsNotInVerificationResults() { + // Given a hollow account and verification results that do not include the hollow account's key + final var alias = ERIN.account().alias(); + Map<Key, SignatureVerificationFuture> verificationResults = Map.of( + ALICE.keyInfo().publicKey(), goodFuture(ALICE.keyInfo().publicKey()), + BOB.keyInfo().publicKey(), goodFuture(BOB.keyInfo().publicKey()), + CAROL.keyInfo().publicKey(), goodFuture(CAROL.keyInfo().publicKey(), CAROL.account())); + // When we pre-handle the transaction + final var result = createVerifier(verificationResults); + // Then we find the verification result is failed + assertThat(result.verificationFor(alias)) + .extracting(SignatureVerification::passed) + .isEqualTo(false); + } - // then - assertThat(verification).isNull(); + @ParameterizedTest + @ValueSource(booleans = {true, false}) + @DisplayName("Able to verify if the hollow account is in the verification results") + void verifyIfHollowAccountIsInVerificationResults(final boolean passes) { + // Given a hollow account and verification results that include the hollow account's key + final var alias = ERIN.account().alias(); + Map<Key, SignatureVerificationFuture> verificationResults = Map.of( + ALICE.keyInfo().publicKey(), + goodFuture(ALICE.keyInfo().publicKey()), + BOB.keyInfo().publicKey(), + goodFuture(BOB.keyInfo().publicKey()), + CAROL.keyInfo().publicKey(), + goodFuture(CAROL.keyInfo().publicKey(), CAROL.account()), + ERIN.keyInfo().publicKey(), + passes + ? goodFuture(ERIN.keyInfo().publicKey(), ERIN.account()) + : badFuture(ERIN.keyInfo().publicKey(), ERIN.account())); + // When we pre-handle the transaction + final var result = createVerifier(verificationResults); + // Then we find the verification result is as expected + assertThat(result.verificationFor(alias)) + .extracting(SignatureVerification::passed) + .isEqualTo(passes); + } } - @Test - void testVerificationForNonExistingAlias() { - // given - final var verifier = new HandleContextVerifier(VERIFICATIONS); + /** A simple utility method for creating a {@link HandleContextVerifier} with the given verification results */ + private HandleContextVerifier createVerifier(@NonNull final Map<Key, SignatureVerificationFuture> map) { + return new HandleContextVerifier(HEDERA_CONFIG, map); + } - // when - final var verification = verifier.verificationFor(CAROL.account().alias()); + /** Convenience method for creating a key list */ + private static Key keyList(Key... keys) { + return Key.newBuilder().keyList(KeyList.newBuilder().keys(keys)).build(); + } - // then - assertThat(verification).isNull(); + /** Convenience method for creating a threshold key */ + private static Key thresholdKey(int threshold, Key...
keys) { + return Key.newBuilder() + .thresholdKey(ThresholdKey.newBuilder() + .keys(KeyList.newBuilder().keys(keys)) + .threshold(threshold)) + .build(); } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleWorkflowTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleWorkflowTest.java index 07b2d382becc..9d8db8ed5491 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleWorkflowTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleWorkflowTest.java @@ -18,12 +18,12 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.BDDMockito.given; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mock.Strictness.LENIENT; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -32,15 +32,15 @@ import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.AppTestBase; import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.app.fixtures.signature.ExpandedSignaturePairFactory; import com.hedera.node.app.records.RecordManager; import com.hedera.node.app.service.token.TokenService; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.services.ServiceScopeLookup; -import com.hedera.node.app.signature.ExpandedSignaturePair; import com.hedera.node.app.signature.SignatureExpander; import com.hedera.node.app.signature.SignatureVerificationFuture; import com.hedera.node.app.signature.SignatureVerifier; -import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; @@ -60,11 +60,12 @@ import com.swirlds.platform.internal.EventImpl; import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; +import java.time.InstantSource; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeoutException; import java.util.function.BiConsumer; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; @@ -96,13 +97,14 @@ private static PreHandleResult createPreHandleResult(@NonNull Status status, @No status, code, new TransactionScenarioBuilder().txInfo(), + Set.of(), Map.of(key, FakeSignatureVerificationFuture.goodFuture(key)), null, CONFIG_VERSION); } @Mock - private NodeInfo nodeInfo; + private NetworkInfo networkInfo; @Mock(strictness = LENIENT) private PreHandleWorkflow preHandleWorkflow; @@ -140,7 +142,7 @@ private static PreHandleResult createPreHandleResult(@NonNull Status status, @No private HandleWorkflow workflow; @BeforeEach - void setup() { + void setup(@Mock InstantSource instantSource) { setupStandardStates(); when(platformTxn.getConsensusTimestamp()).thenReturn(CONSENSUS_NOW); @@ -160,14 +162,6 @@ void setup() { final var config = new 
VersionedConfigImpl(HederaTestConfigBuilder.createConfig(), CONFIG_VERSION); when(configProvider.getConfiguration()).thenReturn(config); - doAnswer(invocation -> { - final var expanded = invocation.getArgument(2, Set.class); - expanded.add(mock(ExpandedSignaturePair.class)); - return null; - }) - .when(signatureExpander) - .expand(any(), any(), any()); - doAnswer(invocation -> { final var context = invocation.getArgument(0, HandleContext.class); context.writableStore(WritableAccountStore.class) @@ -178,7 +172,7 @@ void setup() { .dispatchHandle(any()); workflow = new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, dispatcher, recordManager, @@ -186,12 +180,14 @@ void setup() { signatureVerifier, checker, serviceLookup, - configProvider); + configProvider, + instantSource); } @SuppressWarnings("ConstantConditions") @Test void testContructorWithInvalidArguments() { + final var instantSource = InstantSource.system(); assertThatThrownBy(() -> new HandleWorkflow( null, preHandleWorkflow, @@ -201,10 +197,11 @@ void testContructorWithInvalidArguments() { signatureVerifier, checker, serviceLookup, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, null, dispatcher, recordManager, @@ -212,10 +209,11 @@ void testContructorWithInvalidArguments() { signatureVerifier, checker, serviceLookup, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, null, recordManager, @@ -223,10 +221,11 @@ void testContructorWithInvalidArguments() { signatureVerifier, checker, serviceLookup, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, dispatcher, null, @@ -234,10 +233,11 @@ void testContructorWithInvalidArguments() { signatureVerifier, checker, serviceLookup, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, dispatcher, recordManager, @@ -245,10 +245,11 @@ void testContructorWithInvalidArguments() { signatureVerifier, checker, serviceLookup, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, dispatcher, recordManager, @@ -256,10 +257,11 @@ void testContructorWithInvalidArguments() { null, checker, serviceLookup, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, dispatcher, recordManager, @@ -267,10 +269,11 @@ void testContructorWithInvalidArguments() { signatureVerifier, null, serviceLookup, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, dispatcher, recordManager, @@ -278,10 +281,11 @@ void testContructorWithInvalidArguments() { signatureVerifier, checker, null, - configProvider)) + configProvider, + instantSource)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new HandleWorkflow( - nodeInfo, + networkInfo, preHandleWorkflow, dispatcher, 
recordManager, @@ -289,6 +293,19 @@ void testContructorWithInvalidArguments() { signatureVerifier, checker, serviceLookup, + null, + instantSource)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new HandleWorkflow( + networkInfo, + preHandleWorkflow, + dispatcher, + recordManager, + signatureExpander, + signatureVerifier, + checker, + serviceLookup, + configProvider, null)) .isInstanceOf(NullPointerException.class); } @@ -337,7 +354,7 @@ void setup() { void testPreHandleNotExecuted() { // given when(platformTxn.getMetadata()).thenReturn(null); - given(event.getCreatorId()).willReturn(new NodeId(0)); + when(event.getCreatorId()).thenReturn(new NodeId(0)); // when workflow.handleRound(state, round); @@ -347,21 +364,11 @@ void testPreHandleNotExecuted() { } @Test - @Disabled("Functionality currently not implemented (https://github.com/hashgraph/hedera-services/issues/6812)") - @DisplayName("Run preHandle, if configuration has changed between preHandle and handle") - void testConfigurationChanged() { + @DisplayName("Run preHandle, if previous execution resulted in Status.PRE_HANDLE_FAILURE") + void testPreHandleFailure() { // given - final var txInfo = new TransactionScenarioBuilder().txInfo(); - final var preHandleResult = new PreHandleResult( - ALICE.accountID(), - ALICE.account().key(), - Status.SO_FAR_SO_GOOD, - ResponseCodeEnum.OK, - txInfo, - Map.of(), - null, - CONFIG_VERSION - 1L); - when(platformTxn.getMetadata()).thenReturn(preHandleResult); + when(platformTxn.getMetadata()).thenReturn(PRE_HANDLE_FAILURE_RESULT); + when(event.getCreatorId()).thenReturn(new NodeId(0)); // when workflow.handleRound(state, round); @@ -371,11 +378,11 @@ void testConfigurationChanged() { } @Test - @DisplayName("Run preHandle, if previous execution resulted in Status.PRE_HANDLE_FAILURE") - void testPreHandleFailure() { + @DisplayName("Run preHandle, if previous execution resulted in Status.UNKNOWN_FAILURE") + void testUnknownFailure() { // given - when(platformTxn.getMetadata()).thenReturn(PRE_HANDLE_FAILURE_RESULT); - given(event.getCreatorId()).willReturn(new NodeId(0)); + when(platformTxn.getMetadata()).thenReturn(PreHandleResult.unknownFailure()); + when(event.getCreatorId()).thenReturn(new NodeId(0)); // when workflow.handleRound(state, round); @@ -385,11 +392,22 @@ void testPreHandleFailure() { } @Test - @DisplayName("Run preHandle, if previous execution resulted in Status.UNKNOWN_FAILURE") - void testUnknownFailure() { + @DisplayName("Run preHandle, if configuration changed") + void testConfigurationChanged() { // given - when(platformTxn.getMetadata()).thenReturn(PreHandleResult.unknownFailure()); - given(event.getCreatorId()).willReturn(new NodeId(0)); + final var key = ALICE.account().keyOrThrow(); + final var preHandleResult = new PreHandleResult( + ALICE.accountID(), + key, + Status.SO_FAR_SO_GOOD, + ResponseCodeEnum.OK, + new TransactionScenarioBuilder().txInfo(), + Set.of(), + Map.of(key, FakeSignatureVerificationFuture.goodFuture(key)), + null, + CONFIG_VERSION - 1L); + when(platformTxn.getMetadata()).thenReturn(preHandleResult); + when(event.getCreatorId()).thenReturn(new NodeId(0)); // when workflow.handleRound(state, round); @@ -403,7 +421,7 @@ void testUnknownFailure() { void testPreHandleSuccess() { // given when(platformTxn.getMetadata()).thenReturn(null); - given(event.getCreatorId()).willReturn(new NodeId(0)); + when(event.getCreatorId()).thenReturn(new NodeId(0)); // when workflow.handleRound(state, round); @@ -483,7 +501,7 @@ final class 
AddMissingSignaturesTest { @Test @DisplayName("Add passing verification result, if a key was handled in preHandle") - void testExistingKeyWithPassingSignature() throws PreCheckException { + void testRequiredExistingKeyWithPassingSignature() throws PreCheckException, TimeoutException { // given final var alicesKey = ALICE.account().keyOrThrow(); final var bobsKey = BOB.account().keyOrThrow(); @@ -496,6 +514,7 @@ void testExistingKeyWithPassingSignature() throws PreCheckException { Status.SO_FAR_SO_GOOD, ResponseCodeEnum.OK, new TransactionScenarioBuilder().txInfo(), + Set.of(bobsKey), verificationResults, null, CONFIG_VERSION); @@ -507,12 +526,18 @@ void testExistingKeyWithPassingSignature() throws PreCheckException { }) .when(dispatcher) .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); // when workflow.handleRound(state, round); // then - verify(signatureExpander, never()).expand(any(), any(), any()); final var argCapture = ArgumentCaptor.forClass(HandleContext.class); verify(dispatcher).dispatchHandle(argCapture.capture()); final var alicesVerification = argCapture.getValue().verificationFor(alicesKey); @@ -529,7 +554,7 @@ void testExistingKeyWithPassingSignature() throws PreCheckException { @Test @DisplayName("Add failing verification result, if a key was handled in preHandle") - void testExistingKeyWithFailingSignature() throws PreCheckException { + void testRequiredExistingKeyWithFailingSignature() throws PreCheckException { // given final var alicesKey = ALICE.account().keyOrThrow(); final var bobsKey = BOB.account().keyOrThrow(); @@ -542,6 +567,7 @@ void testExistingKeyWithFailingSignature() throws PreCheckException { Status.SO_FAR_SO_GOOD, ResponseCodeEnum.OK, new TransactionScenarioBuilder().txInfo(), + Set.of(bobsKey), verificationResults, null, CONFIG_VERSION); @@ -553,21 +579,29 @@ void testExistingKeyWithFailingSignature() throws PreCheckException { }) .when(dispatcher) .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); // when workflow.handleRound(state, round); // then - verify(signatureExpander, never()).expand(any(), any(), any()); verify(dispatcher, never()).dispatchHandle(any()); } @Test @DisplayName("Trigger passing verification, if new key was found") - void testNonExistingKeyWithPassingSignature() throws PreCheckException { + void testRequiredNewKeyWithPassingSignature() throws PreCheckException, TimeoutException { // given final var alicesKey = ALICE.account().keyOrThrow(); final var bobsKey = BOB.account().keyOrThrow(); + final var verificationResults = Map.of( + bobsKey, FakeSignatureVerificationFuture.goodFuture(bobsKey)); doAnswer(invocation -> { final var context = invocation.getArgument(0, PreHandleContext.class); context.requireKey(bobsKey); @@ -575,15 +609,23 @@ void testNonExistingKeyWithPassingSignature() throws PreCheckException { }) .when(dispatcher) .dispatchPreHandle(any()); - final var verificationResults = Map.of( - bobsKey, FakeSignatureVerificationFuture.goodFuture(bobsKey)); - when(signatureVerifier.verify(any(), any())).thenReturn(verificationResults); + doAnswer(invocation -> { + final var expanded = 
invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); + when(signatureVerifier.verify( + any(), + argThat(set -> set.size() == 1 + && bobsKey.equals(set.iterator().next().key())))) + .thenReturn(verificationResults); // when workflow.handleRound(state, round); // then - verify(signatureExpander).expand(eq(bobsKey), any(), any()); final var argCapture = ArgumentCaptor.forClass(HandleContext.class); verify(dispatcher).dispatchHandle(argCapture.capture()); final var alicesVerification = argCapture.getValue().verificationFor(alicesKey); @@ -600,7 +642,7 @@ void testNonExistingKeyWithPassingSignature() throws PreCheckException { @Test @DisplayName("Trigger failing verification, if new key was found") - void testNonExistingKeyWithFailingSignature() throws PreCheckException { + void testRequiredNewKeyWithFailingSignature() throws PreCheckException { // given final var bobsKey = BOB.account().keyOrThrow(); doAnswer(invocation -> { @@ -610,20 +652,228 @@ void testNonExistingKeyWithFailingSignature() throws PreCheckException { }) .when(dispatcher) .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); final var verificationResults = Map.of( bobsKey, FakeSignatureVerificationFuture.badFuture(bobsKey)); - when(signatureVerifier.verify(any(), any())).thenReturn(verificationResults); + when(signatureVerifier.verify( + any(), + argThat(set -> set.size() == 1 + && bobsKey.equals(set.iterator().next().key())))) + .thenReturn(verificationResults); // when workflow.handleRound(state, round); // then - verify(signatureExpander).expand(eq(bobsKey), any(), any()); verify(dispatcher, never()).dispatchHandle(any()); } @Test - void testComplexCase() throws PreCheckException { + @DisplayName("Add passing verification result, if a key was handled in preHandle") + void testOptionalExistingKeyWithPassingSignature() throws PreCheckException, TimeoutException { + // given + final var alicesKey = ALICE.account().keyOrThrow(); + final var bobsKey = BOB.account().keyOrThrow(); + final var verificationResults = Map.of( + alicesKey, FakeSignatureVerificationFuture.goodFuture(alicesKey), + bobsKey, FakeSignatureVerificationFuture.goodFuture(bobsKey)); + final var preHandleResult = new PreHandleResult( + ALICE.accountID(), + alicesKey, + Status.SO_FAR_SO_GOOD, + ResponseCodeEnum.OK, + new TransactionScenarioBuilder().txInfo(), + Set.of(), + verificationResults, + null, + CONFIG_VERSION); + when(platformTxn.getMetadata()).thenReturn(preHandleResult); + doAnswer(invocation -> { + final var context = invocation.getArgument(0, PreHandleContext.class); + context.optionalKey(bobsKey); + return null; + }) + .when(dispatcher) + .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); + + // when + workflow.handleRound(state, round); + + // then + final var argCapture = ArgumentCaptor.forClass(HandleContext.class); + verify(dispatcher).dispatchHandle(argCapture.capture()); + final var alicesVerification = argCapture.getValue().verificationFor(alicesKey); + 
assertThat(alicesVerification).isNotNull(); + assertThat(alicesVerification.key()).isEqualTo(alicesKey); + assertThat(alicesVerification.evmAlias()).isNull(); + assertThat(alicesVerification.passed()).isTrue(); + final var bobsVerification = argCapture.getValue().verificationFor(bobsKey); + assertThat(bobsVerification).isNotNull(); + assertThat(bobsVerification.key()).isEqualTo(bobsKey); + assertThat(bobsVerification.evmAlias()).isNull(); + assertThat(bobsVerification.passed()).isTrue(); + } + + @Test + @DisplayName("Add failing verification result, if a key was handled in preHandle") + void testOptionalExistingKeyWithFailingSignature() throws PreCheckException, TimeoutException { + // given + final var alicesKey = ALICE.account().keyOrThrow(); + final var bobsKey = BOB.account().keyOrThrow(); + final var verificationResults = Map.of( + alicesKey, FakeSignatureVerificationFuture.goodFuture(alicesKey), + bobsKey, FakeSignatureVerificationFuture.badFuture(bobsKey)); + final var preHandleResult = new PreHandleResult( + ALICE.accountID(), + alicesKey, + Status.SO_FAR_SO_GOOD, + ResponseCodeEnum.OK, + new TransactionScenarioBuilder().txInfo(), + Set.of(), + verificationResults, + null, + CONFIG_VERSION); + when(platformTxn.getMetadata()).thenReturn(preHandleResult); + doAnswer(invocation -> { + final var context = invocation.getArgument(0, PreHandleContext.class); + context.optionalKey(bobsKey); + return null; + }) + .when(dispatcher) + .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); + + // when + workflow.handleRound(state, round); + + // then + final var argCapture = ArgumentCaptor.forClass(HandleContext.class); + verify(dispatcher).dispatchHandle(argCapture.capture()); + final var alicesVerification = argCapture.getValue().verificationFor(alicesKey); + assertThat(alicesVerification).isNotNull(); + assertThat(alicesVerification.key()).isEqualTo(alicesKey); + assertThat(alicesVerification.evmAlias()).isNull(); + assertThat(alicesVerification.passed()).isTrue(); + final var bobsVerification = argCapture.getValue().verificationFor(bobsKey); + assertThat(bobsVerification).isNotNull(); + assertThat(bobsVerification.key()).isEqualTo(bobsKey); + assertThat(bobsVerification.evmAlias()).isNull(); + assertThat(bobsVerification.passed()).isFalse(); + } + + @Test + @DisplayName("Trigger passing verification, if new key was found") + void testOptionalNewKeyWithPassingSignature() throws PreCheckException, TimeoutException { + // given + final var alicesKey = ALICE.account().keyOrThrow(); + final var bobsKey = BOB.account().keyOrThrow(); + final var verificationResults = Map.of( + bobsKey, FakeSignatureVerificationFuture.goodFuture(bobsKey)); + doAnswer(invocation -> { + final var context = invocation.getArgument(0, PreHandleContext.class); + context.optionalKey(bobsKey); + return null; + }) + .when(dispatcher) + .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); + when(signatureVerifier.verify( + any(), + argThat(set -> set.size() == 1 + && bobsKey.equals(set.iterator().next().key())))) + .thenReturn(verificationResults); + + // when + workflow.handleRound(state, 
round); + + // then + final var argCapture = ArgumentCaptor.forClass(HandleContext.class); + verify(dispatcher).dispatchHandle(argCapture.capture()); + final var alicesVerification = argCapture.getValue().verificationFor(alicesKey); + assertThat(alicesVerification).isNotNull(); + assertThat(alicesVerification.key()).isEqualTo(alicesKey); + assertThat(alicesVerification.evmAlias()).isNull(); + assertThat(alicesVerification.passed()).isTrue(); + final var bobsVerification = argCapture.getValue().verificationFor(bobsKey); + assertThat(bobsVerification).isNotNull(); + assertThat(bobsVerification.key()).isEqualTo(bobsKey); + assertThat(bobsVerification.evmAlias()).isNull(); + assertThat(bobsVerification.passed()).isTrue(); + } + + @Test + @DisplayName("Trigger failing verification, if new key was found") + void testOptionalNewKeyWithFailingSignature() throws PreCheckException, TimeoutException { + // given + final var alicesKey = ALICE.account().keyOrThrow(); + final var bobsKey = BOB.account().keyOrThrow(); + doAnswer(invocation -> { + final var context = invocation.getArgument(0, PreHandleContext.class); + context.optionalKey(bobsKey); + return null; + }) + .when(dispatcher) + .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); + final var verificationResults = Map.of( + bobsKey, FakeSignatureVerificationFuture.badFuture(bobsKey)); + when(signatureVerifier.verify( + any(), + argThat(set -> set.size() == 1 + && bobsKey.equals(set.iterator().next().key())))) + .thenReturn(verificationResults); + + // when + workflow.handleRound(state, round); + + // then + final var argCapture = ArgumentCaptor.forClass(HandleContext.class); + verify(dispatcher).dispatchHandle(argCapture.capture()); + final var alicesVerification = argCapture.getValue().verificationFor(alicesKey); + assertThat(alicesVerification).isNotNull(); + assertThat(alicesVerification.key()).isEqualTo(alicesKey); + assertThat(alicesVerification.evmAlias()).isNull(); + assertThat(alicesVerification.passed()).isTrue(); + final var bobsVerification = argCapture.getValue().verificationFor(bobsKey); + assertThat(bobsVerification).isNotNull(); + assertThat(bobsVerification.key()).isEqualTo(bobsKey); + assertThat(bobsVerification.evmAlias()).isNull(); + assertThat(bobsVerification.passed()).isFalse(); + } + + @Test + void testComplexCase() throws PreCheckException, TimeoutException { // given final var alicesKey = ALICE.account().keyOrThrow(); final var bobsKey = BOB.account().keyOrThrow(); @@ -639,6 +889,7 @@ void testComplexCase() throws PreCheckException { Status.SO_FAR_SO_GOOD, ResponseCodeEnum.OK, new TransactionScenarioBuilder().txInfo(), + Set.of(erinsKey), preHandleVerificationResults, null, CONFIG_VERSION); @@ -646,14 +897,32 @@ void testComplexCase() throws PreCheckException { doAnswer(invocation -> { final var context = invocation.getArgument(0, PreHandleContext.class); context.requireKey(bobsKey); - context.requireKey(carolsKey); + context.optionalKey(carolsKey); return null; }) .when(dispatcher) .dispatchPreHandle(any()); + doAnswer(invocation -> { + final var expanded = invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ed25519Pair(bobsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(bobsKey)), any(), any()); + doAnswer(invocation -> { + final var expanded = 
invocation.getArgument(2, Set.class); + expanded.add(ExpandedSignaturePairFactory.ecdsaPair(carolsKey)); + return null; + }) + .when(signatureExpander) + .expand(eq(Set.of(carolsKey)), any(), any()); final var verificationResults = Map.of( carolsKey, FakeSignatureVerificationFuture.goodFuture(carolsKey)); - when(signatureVerifier.verify(any(), any())).thenReturn(verificationResults); + when(signatureVerifier.verify( + any(), + argThat(set -> set.size() == 1 + && carolsKey.equals(set.iterator().next().key())))) + .thenReturn(verificationResults); // when workflow.handleRound(state, round); @@ -676,7 +945,11 @@ void testComplexCase() throws PreCheckException { assertThat(carolsVerification.key()).isEqualTo(carolsKey); assertThat(carolsVerification.evmAlias()).isNull(); assertThat(carolsVerification.passed()).isTrue(); - assertThat(argCapture.getValue().verificationFor(erinsKey)).isNull(); + final var erinsVerification = argCapture.getValue().verificationFor(erinsKey); + assertThat(erinsVerification).isNotNull(); + assertThat(erinsVerification.key()).isEqualTo(erinsKey); + assertThat(erinsVerification.evmAlias()).isNull(); + assertThat(erinsVerification.passed()).isFalse(); } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/AttributeValidatorImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/AttributeValidatorImplTest.java new file mode 100644 index 000000000000..950578526556 --- /dev/null +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/AttributeValidatorImplTest.java @@ -0,0 +1,172 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.workflows.handle.validation; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.AUTORENEW_DURATION_NOT_IN_RANGE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.BAD_ENCODING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_EXPIRATION_TIME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ZERO_BYTE_IN_STRING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MEMO_TOO_LONG; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.hedera.node.app.spi.validation.AttributeValidator.MAX_NESTED_KEY_LEVELS; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mock.Strictness.LENIENT; + +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.KeyList; +import com.hedera.hapi.node.base.ThresholdKey; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.time.Instant; +import java.util.Arrays; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class AttributeValidatorImplTest { + private static final long maxLifetime = 3_000_000L; + private static final byte[] MOCK_ED25519_KEY = "abcdefghabcdefghabcdefghabcdefgh".getBytes(); + + @Mock(strictness = LENIENT) + private HandleContext context; + + private AttributeValidatorImpl subject; + + @BeforeEach + void setUp() { + final var config = HederaTestConfigBuilder.create() + .withValue("entities.maxLifetime", maxLifetime) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + subject = new AttributeValidatorImpl(context); + } + + @Test + void memoCheckWorks() { + final var config = HederaTestConfigBuilder.create() + .withValue("hedera.transaction.maxMemoUtf8Bytes", 100) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + final char[] aaa = new char[101]; + Arrays.fill(aaa, 'a'); + final var memo = new String(aaa); + + assertThatCode(() -> subject.validateMemo("OK")).doesNotThrowAnyException(); + assertThatThrownBy(() -> subject.validateMemo(memo)) + .isInstanceOf(HandleException.class) + .has(responseCode(MEMO_TOO_LONG)); + assertThatThrownBy(() -> subject.validateMemo("Not s\u0000 ok!")) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ZERO_BYTE_IN_STRING)); + } + + @Test + void rejectsFutureExpiryImplyingSuperMaxLifetime() { + given(context.consensusNow()).willReturn(Instant.ofEpochSecond(0L)); + Assertions.assertThatThrownBy(() -> subject.validateExpiry(maxLifetime + 1)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void allowsFutureExpiryBeforeMaxLifetime() { + final var now = Instant.ofEpochSecond(1_234_567L); + given(context.consensusNow()).willReturn(now); + assertThatCode(() -> subject.validateExpiry(now.getEpochSecond() + 1)).doesNotThrowAnyException(); + } + + @Test + void rejectsAnyNonFutureExpiry() { + final var now = 1_234_567L; + 
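// An expiry equal to the current consensus time is not in the future, so it must be rejected +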
given(context.consensusNow()).willReturn(Instant.ofEpochSecond(now)); + assertThatThrownBy(() -> subject.validateExpiry(now)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void rejectsBriefAutoRenewPeriod() { + final var config = HederaTestConfigBuilder.create() + .withValue("ledger.autoRenewPeriod.minDuration", 1_000L) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + assertThatThrownBy(() -> subject.validateAutoRenewPeriod(55L)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + } + + @Test + void rejectsOverLongAutoRenewPeriod() { + final var config = HederaTestConfigBuilder.create() + .withValue("ledger.autoRenewPeriod.minDuration", 1_000L) + .withValue("ledger.autoRenewPeriod.maxDuration", 10_000L) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + assertThatThrownBy(() -> subject.validateAutoRenewPeriod(10_001L)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + } + + @Test + void rejectsOverlyNestedKey() { + final var acceptablyNested = + nestKeys(Key.newBuilder(), MAX_NESTED_KEY_LEVELS - 1).build(); + final var overlyNested = + nestKeys(Key.newBuilder(), MAX_NESTED_KEY_LEVELS).build(); + assertThatCode(() -> subject.validateKey(acceptablyNested)).doesNotThrowAnyException(); + assertThatThrownBy(() -> subject.validateKey(overlyNested)) + .isInstanceOf(HandleException.class) + .has(responseCode(BAD_ENCODING)); + } + + @Test + void unsetKeysAreNotValid() { + assertThatThrownBy(() -> subject.validateKey(Key.DEFAULT)) + .isInstanceOf(HandleException.class) + .has(responseCode(BAD_ENCODING)); + } + + private static Key.Builder nestKeys(final Key.Builder builder, final int additionalLevels) { + if (additionalLevels == 0) { + builder.ed25519(Bytes.wrap(MOCK_ED25519_KEY)); + return builder; + } + + var nestedBuilder = Key.newBuilder(); + nestKeys(nestedBuilder, additionalLevels - 1); + if (additionalLevels % 2 == 0) { + builder.keyList(KeyList.newBuilder().keys(nestedBuilder.build())); + } else { + builder.thresholdKey(ThresholdKey.newBuilder() + .threshold(1) + .keys(KeyList.newBuilder().keys(nestedBuilder.build()))); + } + return builder; + } +} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/ExpiryValidatorImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/ExpiryValidatorImplTest.java new file mode 100644 index 000000000000..a1f726d5fe47 --- /dev/null +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/ExpiryValidatorImplTest.java @@ -0,0 +1,340 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.workflows.handle.validation; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.AUTORENEW_DURATION_NOT_IN_RANGE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.CONTRACT_EXPIRED_AND_PENDING_REMOVAL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.EXPIRATION_REDUCTION_NOT_ALLOWED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_EXPIRATION_TIME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.hedera.node.app.spi.validation.ExpiryMeta.NA; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.BDDMockito.given; +import static org.mockito.BDDMockito.willThrow; +import static org.mockito.Mock.Strictness.LENIENT; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.node.app.service.evm.exceptions.InvalidTransactionException; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.spi.validation.AttributeValidator; +import com.hedera.node.app.spi.validation.EntityType; +import com.hedera.node.app.spi.validation.ExpiryMeta; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import java.time.Instant; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class ExpiryValidatorImplTest { + private static final Instant NOW = Instant.ofEpochSecond(1_234_567L); + private static final long A_TIME = 666_666_666L; + private static final long B_TIME = 777_777_777L; + private static final long A_PERIOD = 666_666L; + private static final long B_PERIOD = 777_777L; + private static final long AN_AUTO_RENEW_NUM = 888; + + @Mock + private AttributeValidator attributeValidator; + + @Mock(strictness = LENIENT) + private ReadableAccountStore accountStore; + + @Mock(strictness = LENIENT) + private HandleContext context; + + private ExpiryValidatorImpl subject; + + @BeforeEach + void setUp() { + given(context.consensusNow()).willReturn(NOW); + final var config = HederaTestConfigBuilder.createConfig(); + given(context.configuration()).willReturn(config); + given(context.attributeValidator()).willReturn(attributeValidator); + given(accountStore.getAccountById(any())).willReturn(Account.DEFAULT); + given(context.readableStore(ReadableAccountStore.class)).willReturn(accountStore); + + subject = new ExpiryValidatorImpl(context); + } + + @Test + void onCreationRequiresEitherExplicitValueOrFullAutoRenewMetaIfNotSelfFunding() { + willThrow(new HandleException(INVALID_EXPIRATION_TIME)) + .given(attributeValidator) + .validateExpiry(anyLong()); + final var expiryMeta1 = new ExpiryMeta(NA, NA, AN_AUTO_RENEW_NUM); + assertThatThrownBy(() -> 
subject.resolveCreationAttempt(false, expiryMeta1)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + final var expiryMeta2 = new ExpiryMeta(NA, A_PERIOD, NA); + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, expiryMeta2)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void validatesShard() { + final var config = + HederaTestConfigBuilder.create().withValue("hedera.shard", 1L).getOrCreateConfig(); + given(context.configuration()).willReturn(config); + final var newMeta = new ExpiryMeta(A_TIME, A_PERIOD, 2L, 2L, AN_AUTO_RENEW_NUM); + + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, newMeta)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_AUTORENEW_ACCOUNT)); + } + + @Test + void validatesRealm() { + final var config = HederaTestConfigBuilder.create() + .withValue("hedera.shard", 1L) + .withValue("hedera.realm", 2L) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + final var newMeta = new ExpiryMeta(A_TIME, A_PERIOD, 1L, 3L, AN_AUTO_RENEW_NUM); + + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, newMeta)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_AUTORENEW_ACCOUNT)); + } + + @Test + void onCreationRequiresValidExpiryIfExplicit() { + willThrow(new HandleException(INVALID_EXPIRATION_TIME)) + .given(attributeValidator) + .validateExpiry(A_TIME); + + final var expiryMeta = new ExpiryMeta(A_TIME, NA, AN_AUTO_RENEW_NUM); + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, expiryMeta)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void translatesFailureOnExplicitAutoRenewAccount() { + given(accountStore.getAccountById( + AccountID.newBuilder().accountNum(AN_AUTO_RENEW_NUM).build())) + .willThrow(new InvalidTransactionException( + com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT)); + + final var expiryMeta = new ExpiryMeta(A_TIME, NA, AN_AUTO_RENEW_NUM); + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, expiryMeta)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_AUTORENEW_ACCOUNT)); + } + + @Test + void onCreationUsesAutoRenewPeriodEvenWithoutFullSpecIfSelfFunding() { + assertThatCode(() -> subject.resolveCreationAttempt(true, new ExpiryMeta(NA, A_PERIOD, NA))) + .doesNotThrowAnyException(); + } + + @Test + void onCreationRequiresValidExpiryIfImplicit() { + willThrow(new HandleException(INVALID_EXPIRATION_TIME)) + .given(attributeValidator) + .validateExpiry(NOW.getEpochSecond() + A_PERIOD); + + final var expiryMeta = new ExpiryMeta(NA, A_PERIOD, AN_AUTO_RENEW_NUM); + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, expiryMeta)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void validatesAutoRenewPeriodIfSet() { + willThrow(new HandleException(AUTORENEW_DURATION_NOT_IN_RANGE)) + .given(attributeValidator) + .validateAutoRenewPeriod(A_PERIOD); + + final var expiryMeta = new ExpiryMeta(A_TIME, A_PERIOD, NA); + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, expiryMeta)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + } + + @Test + void validatesImpliedExpiry() { + willThrow(new HandleException(AUTORENEW_DURATION_NOT_IN_RANGE)) + .given(attributeValidator) + .validateAutoRenewPeriod(A_PERIOD); + + final var expiryMeta = new 
ExpiryMeta(A_TIME, A_PERIOD, NA); + assertThatThrownBy(() -> subject.resolveCreationAttempt(false, expiryMeta)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + } + + @Test + void summarizesExpiryOnlyCase() { + assertThatCode(() -> subject.resolveCreationAttempt(false, new ExpiryMeta(A_TIME, NA, NA))) + .doesNotThrowAnyException(); + } + + @Test + void summarizesExpiryAndAutoRenewNumCase() { + assertThatCode(() -> subject.resolveCreationAttempt(false, new ExpiryMeta(A_TIME, NA, AN_AUTO_RENEW_NUM))) + .doesNotThrowAnyException(); + } + + @Test + void summarizesExpiryAndValidAutoRenewPeriodCase() { + assertThatCode(() -> subject.resolveCreationAttempt(false, new ExpiryMeta(A_TIME, A_PERIOD, NA))) + .doesNotThrowAnyException(); + } + + @Test + void summarizesFullAutoRenewSpecPeriodCase() { + assertThatCode(() -> subject.resolveCreationAttempt( + false, new ExpiryMeta(NOW.getEpochSecond() + A_PERIOD, A_PERIOD, AN_AUTO_RENEW_NUM))) + .doesNotThrowAnyException(); + } + + @Test + void updateCannotExplicitlyReduceExpiry() { + final var current = new ExpiryMeta(A_TIME, NA, NA); + final var update = new ExpiryMeta(A_TIME - 1, NA, NA); + + assertThatThrownBy(() -> subject.resolveUpdateAttempt(current, update)) + .isInstanceOf(HandleException.class) + .has(responseCode(EXPIRATION_REDUCTION_NOT_ALLOWED)); + } + + @Test + void explicitExpiryExtensionMustBeValid() { + final var current = new ExpiryMeta(A_TIME, NA, NA); + final var update = new ExpiryMeta(A_TIME - 1, NA, NA); + + assertThatThrownBy(() -> subject.resolveUpdateAttempt(current, update)) + .isInstanceOf(HandleException.class) + .has(responseCode(EXPIRATION_REDUCTION_NOT_ALLOWED)); + } + + @Test + void ifJustSettingAutoRenewAccountThenNetPeriodMustBeValid() { + final var current = new ExpiryMeta(A_TIME, 0, NA); + final var update = new ExpiryMeta(NA, NA, AN_AUTO_RENEW_NUM); + + willThrow(new HandleException(AUTORENEW_DURATION_NOT_IN_RANGE)) + .given(attributeValidator) + .validateAutoRenewPeriod(0L); + + assertThatThrownBy(() -> subject.resolveUpdateAttempt(current, update)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + } + + @Test + void ifSettingAutoRenewPeriodThenMustBeValid() { + final var current = new ExpiryMeta(A_TIME, 0, NA); + final var update = new ExpiryMeta(NA, B_PERIOD, AN_AUTO_RENEW_NUM); + + willThrow(new HandleException(AUTORENEW_DURATION_NOT_IN_RANGE)) + .given(attributeValidator) + .validateAutoRenewPeriod(B_PERIOD); + + assertThatThrownBy(() -> subject.resolveUpdateAttempt(current, update)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + } + + @Test + void ifUpdatingAutoRenewNumMustBeValid() { + final var current = new ExpiryMeta(A_TIME, 0, NA); + final var update = new ExpiryMeta(NA, B_PERIOD, AN_AUTO_RENEW_NUM); + + given(accountStore.getAccountById( + AccountID.newBuilder().accountNum(AN_AUTO_RENEW_NUM).build())) + .willThrow(new InvalidTransactionException( + com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT)); + + assertThatThrownBy(() -> subject.resolveUpdateAttempt(current, update)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_AUTORENEW_ACCOUNT)); + } + + @Test + void ifUpdatingExpiryMustBeValid() { + final var current = new ExpiryMeta(A_TIME, 0, NA); + final var update = new ExpiryMeta(B_TIME, B_PERIOD, AN_AUTO_RENEW_NUM); + + willThrow(new HandleException(INVALID_EXPIRATION_TIME)) + .given(attributeValidator) + 
.validateExpiry(B_TIME); + + assertThatThrownBy(() -> subject.resolveUpdateAttempt(current, update)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void canSetEverythingValidly() { + final var current = new ExpiryMeta(A_TIME, 0, NA); + final var update = new ExpiryMeta(B_TIME, B_PERIOD, AN_AUTO_RENEW_NUM); + + assertThat(subject.resolveUpdateAttempt(current, update)).isEqualTo(update); + } + + @Test + void canUseWildcardForRemovingAutoRenewAccount() { + final var current = new ExpiryMeta(A_TIME, 0, NA); + final var update = new ExpiryMeta(B_TIME, B_PERIOD, 0); + + assertThat(subject.resolveUpdateAttempt(current, update)).isEqualTo(update); + } + + @Test + void checksIfAccountIsDetachedIfBalanceZero() { + assertThat(subject.expirationStatus(EntityType.ACCOUNT, false, 0)).isEqualTo(OK); + assertThat(subject.isDetached(EntityType.ACCOUNT, false, 0)).isFalse(); + } + + @Test + void failsIfAccountExpiredAndPendingRemoval() { + assertThat(subject.expirationStatus(EntityType.ACCOUNT, true, 0L)) + .isEqualTo(ACCOUNT_EXPIRED_AND_PENDING_REMOVAL); + assertThat(subject.isDetached(EntityType.ACCOUNT, true, 0)).isTrue(); + + assertThat(subject.expirationStatus(EntityType.CONTRACT, true, 0L)) + .isEqualTo(CONTRACT_EXPIRED_AND_PENDING_REMOVAL); + assertThat(subject.isDetached(EntityType.CONTRACT, true, 0)).isTrue(); + } + + @Test + void notDetachedIfAccountNotExpired() { + assertThat(subject.expirationStatus(EntityType.ACCOUNT, false, 0L)).isEqualTo(OK); + assertThat(subject.isDetached(EntityType.ACCOUNT, false, 10)).isFalse(); + } + + @Test + void notDetachedIfAutoRenewDisabled() { + assertThat(subject.expirationStatus(EntityType.ACCOUNT, false, 0L)).isEqualTo(OK); + assertThat(subject.isDetached(EntityType.ACCOUNT, false, 0)).isFalse(); + } +} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/MonoExpiryValidatorTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/MonoExpiryValidatorTest.java index 43763e541de9..9d04d4b72abc 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/MonoExpiryValidatorTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/validation/MonoExpiryValidatorTest.java @@ -31,7 +31,6 @@ import static org.mockito.BDDMockito.willThrow; import com.hedera.hapi.node.base.ResponseCodeEnum; -import com.hedera.hapi.node.state.token.Account; import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.evm.exceptions.InvalidTransactionException; import com.hedera.node.app.service.mono.config.HederaNumbers; @@ -42,7 +41,6 @@ import com.hedera.node.app.spi.validation.ExpiryMeta; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.config.ConfigProvider; -import com.hedera.node.config.VersionedConfiguration; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import java.util.function.LongSupplier; import org.junit.jupiter.api.BeforeEach; @@ -74,21 +72,17 @@ class MonoExpiryValidatorTest { @Mock private HederaNumbers numbers; - @Mock - private Account account; - @Mock(strictness = Strictness.LENIENT) private ConfigProvider configProvider; - private VersionedConfiguration configuration; - private MonoExpiryValidator subject; @BeforeEach void setUp() { subject = new MonoExpiryValidator(accountStore, attributeValidator, consensusSecondNow, numbers, configProvider); - configuration = new 
VersionedConfigImpl(HederaTestConfigBuilder.createConfig(), DEFAULT_CONFIG_VERSION); + final var configuration = + new VersionedConfigImpl(HederaTestConfigBuilder.createConfig(), DEFAULT_CONFIG_VERSION); given(configProvider.getConfiguration()).willReturn(configuration); } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/IngestCheckerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/IngestCheckerTest.java index f3e81e4f27e1..702d189444b3 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/IngestCheckerTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/IngestCheckerTest.java @@ -25,18 +25,22 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_SIGNATURE; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; -import static com.hedera.hapi.node.base.ResponseCodeEnum.KEY_PREFIX_MISMATCH; import static com.hedera.hapi.node.base.ResponseCodeEnum.PLATFORM_NOT_ACTIVE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.UNAUTHORIZED; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.estimatedFee; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mock.Strictness.LENIENT; import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.SignatureMap; import com.hedera.hapi.node.base.Transaction; @@ -45,17 +49,20 @@ import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.hapi.node.transaction.UncheckedSubmitBody; import com.hedera.node.app.AppTestBase; -import com.hedera.node.app.signature.SignaturePreparer; +import com.hedera.node.app.info.CurrentPlatformStatus; +import com.hedera.node.app.signature.SignatureExpander; +import com.hedera.node.app.signature.SignatureVerificationFuture; +import com.hedera.node.app.signature.SignatureVerifier; import com.hedera.node.app.solvency.SolvencyPreCheck; -import com.hedera.node.app.spi.info.CurrentPlatformStatus; +import com.hedera.node.app.spi.signatures.SignatureVerification; import com.hedera.node.app.spi.workflows.InsufficientBalanceException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.throttle.ThrottleAccumulator; import com.hedera.node.app.workflows.TransactionChecker; import com.hedera.node.app.workflows.TransactionInfo; import com.swirlds.common.system.status.PlatformStatus; +import java.util.Map; import java.util.stream.Stream; -import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -83,7 +90,10 @@ class IngestCheckerTest extends AppTestBase { ThrottleAccumulator throttleAccumulator; @Mock(strictness = LENIENT) - private SignaturePreparer signaturePreparer; + private SignatureExpander signatureExpander; + + 
@Mock(strictness = LENIENT) + private SignatureVerifier signatureVerifier; @Mock(strictness = LENIENT) private SolvencyPreCheck solvencyPreCheck; @@ -116,7 +126,12 @@ void setUp() throws PreCheckException { when(transactionChecker.check(tx)).thenReturn(transactionInfo); subject = new IngestChecker( - currentPlatformStatus, transactionChecker, throttleAccumulator, solvencyPreCheck, signaturePreparer); + currentPlatformStatus, + transactionChecker, + throttleAccumulator, + solvencyPreCheck, + signatureExpander, + signatureVerifier); } @Nested @@ -138,7 +153,7 @@ void testParseAndCheckWithInactivePlatformFails(final PlatformStatus status) { if (status != PlatformStatus.ACTIVE) { // Given a platform that is not ACTIVE when(currentPlatformStatus.get()).thenReturn(status); - + // When we try to parse and check a transaction, it should fail because the platform is not active assertThatThrownBy(() -> subject.checkNodeState()) .isInstanceOf(PreCheckException.class) .has(responseCode(PLATFORM_NOT_ACTIVE)); @@ -148,16 +163,22 @@ void testParseAndCheckWithInactivePlatformFails(final PlatformStatus status) { @Test @DisplayName("Run all checks successfully") - void testRunAllChecksSuccessfully() throws PreCheckException { + void testRunAllChecksSuccessfully() throws Exception { // given final var expected = new TransactionInfo( tx, txBody, MOCK_SIGNATURE_MAP, tx.signedTransactionBytes(), HederaFunctionality.UNCHECKED_SUBMIT); + final var verificationResultFuture = mock(SignatureVerificationFuture.class); + final var verificationResult = mock(SignatureVerification.class); + when(verificationResult.passed()).thenReturn(true); + when(verificationResultFuture.get(anyLong(), any())).thenReturn(verificationResult); + when(signatureVerifier.verify(any(), any())) + .thenReturn(Map.of(ALICE.account().keyOrThrow(), verificationResultFuture)); // when final var actual = subject.runAllChecks(state, tx); // then - Assertions.assertThat(actual).isEqualTo(expected); + assertThat(actual).isEqualTo(expected); } @Nested @@ -198,8 +219,10 @@ void randomException() throws PreCheckException { } } + // TODO: #2 Test deduplication + @Nested - @DisplayName("2. Check throttles") + @DisplayName("3. Check throttles") class ThrottleTests { @Test @DisplayName("When the transaction is throttled, the transaction should be rejected") @@ -228,45 +251,7 @@ void randomException() { } @Nested - @DisplayName("3. 
Check payer's signature") - class PayerSignatureTests { - public static Stream failureReasons() { - return Stream.of( - Arguments.of(INVALID_SIGNATURE), - Arguments.of(KEY_PREFIX_MISMATCH), - Arguments.of(INVALID_ACCOUNT_ID)); - } - - @ParameterizedTest(name = "SignatureCheck fails with error code {0}") - @MethodSource("failureReasons") - @DisplayName("If the payer signature is invalid, the transaction should be rejected") - void payerSignatureFails(ResponseCodeEnum failureReason) throws PreCheckException { - doThrow(new PreCheckException(failureReason)) - .when(signaturePreparer) - .syncGetPayerSigStatus(any()); - - assertThatThrownBy(() -> subject.runAllChecks(state, tx)) - .isInstanceOf(PreCheckException.class) - .has(responseCode(failureReason)); - } - - @Test - @DisplayName("If some random exception is thrown from checking signatures, the exception is bubbled up") - void randomException() throws PreCheckException { - // Given an IngestChecker that will throw a RuntimeException from checkPayerSignature - doThrow(new RuntimeException("checkPayerSignature exception")) - .when(signaturePreparer) - .syncGetPayerSigStatus(any()); - - // When the transaction is submitted, then the exception is bubbled up - assertThatThrownBy(() -> subject.runAllChecks(state, tx)) - .isInstanceOf(RuntimeException.class) - .hasMessageContaining("checkPayerSignature exception"); - } - } - - @Nested - @DisplayName("4.a Check account status") + @DisplayName("5.a Check account status") class PayerAccountStatusTests { public static Stream failureReasons() { @@ -300,7 +285,7 @@ void randomException() throws PreCheckException { } @Nested - @DisplayName("4.b Check payer solvency") + @DisplayName("5.b Check payer solvency") class PayerBalanceTests { public static Stream failureReasons() { @@ -338,4 +323,81 @@ void randomException() throws PreCheckException { .hasMessageContaining("checkSolvency exception"); } } + + @Nested + @DisplayName("6. Check payer's signature") + class PayerSignatureTests { + @Test + @DisplayName("No account for payer") + void noAccountForPayer() { + // The tx payer is ALICE. If we remove her from the state, then the payer has no account + accountsState.remove(ALICE.accountID()); + + // When the transaction is submitted, then the exception is thrown + assertThatThrownBy(() -> subject.runAllChecks(state, tx)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_ACCOUNT_ID)); + } + + // NOTE: This should never happen in real life, but we need to code defensively for it anyway. + @Test + @DisplayName("No key for payer in state") + void noKeyForPayer() { + // The tx payer is ALICE. We remove her key from state + final var account = accountsState.get(ALICE.accountID()); + assertThat(account).isNotNull(); + accountsState.put( + ALICE.accountID(), account.copyBuilder().key((Key) null).build()); + + // When the transaction is submitted, then the exception is thrown + assertThatThrownBy(() -> subject.runAllChecks(state, tx)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(UNAUTHORIZED)); + } + + @Test + @DisplayName("Payer signature is missing") + void noPayerSignature() { + // If the signature verifier's returned map doesn't contain an entry for ALICE, it means she didn't have a + // signature in the signature map to begin with. 
+ when(signatureVerifier.verify(any(), any())).thenReturn(Map.of()); + + // When the transaction is submitted, then the exception is thrown + assertThatThrownBy(() -> subject.runAllChecks(state, tx)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_SIGNATURE)); + } + + @Test + @DisplayName("Payer verification fails") + void payerVerificationFails() throws Exception { + final var verificationResultFuture = mock(SignatureVerificationFuture.class); + final var verificationResult = mock(SignatureVerification.class); + when(verificationResult.passed()).thenReturn(false); + when(verificationResultFuture.get(anyLong(), any())).thenReturn(verificationResult); + when(signatureVerifier.verify(any(), any())) + .thenReturn(Map.of(ALICE.account().keyOrThrow(), verificationResultFuture)); + + assertThatThrownBy(() -> subject.runAllChecks(state, tx)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_SIGNATURE)); + } + + @Test + @DisplayName("Unexpected verification exception") + void randomException() throws Exception { + // Given a verification result future that throws an exception + final var verificationResultFuture = mock(SignatureVerificationFuture.class); + doThrow(new RuntimeException("checkPayerSignature exception")) + .when(verificationResultFuture) + .get(anyLong(), any()); + when(signatureVerifier.verify(any(), any())) + .thenReturn(Map.of(ALICE.account().keyOrThrow(), verificationResultFuture)); + + // When the transaction is submitted, then the exception is bubbled up + assertThatThrownBy(() -> subject.runAllChecks(state, tx)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("checkPayerSignature exception"); + } + } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/SubmissionManagerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/SubmissionManagerTest.java index fc6c8fe85c52..2d47ba919ece 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/SubmissionManagerTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/ingest/SubmissionManagerTest.java @@ -28,14 +28,19 @@ import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.hapi.node.transaction.UncheckedSubmitBody; import com.hedera.node.app.AppTestBase; -import com.hedera.node.app.service.mono.context.properties.NodeLocalProperties; +import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.mono.context.properties.Profile; import com.hedera.node.app.service.mono.pbj.PbjConverter; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.state.DeduplicationCache; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.data.StatsConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.SpeedometerMetric; +import com.swirlds.common.metrics.config.MetricsConfig; import com.swirlds.common.system.Platform; import java.time.Instant; import org.junit.jupiter.api.BeforeEach; @@ -54,21 +59,31 @@ final class SubmissionManagerTest extends AppTestBase { /** Mocked global properties to verify default transaction duration */ @Mock private DeduplicationCache deduplicationCache; - /** Mocked local properties to verify that we ONLY support Unchecked Submit when in PROD mode */ - 
@Mock - private NodeLocalProperties nodeProps; + /** Configuration */ + private ConfigProvider config; + + @BeforeEach + void setUp() { + config = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(HederaConfig.class) + .withConfigDataType(StatsConfig.class) + .withConfigDataType(MetricsConfig.class) + .getOrCreateConfig(), + 1); + } @Test @DisplayName("Null cannot be provided as any of the constructor args") @SuppressWarnings("ConstantConditions") void testConstructorWithIllegalParameters() { - assertThatThrownBy(() -> new SubmissionManager(null, deduplicationCache, nodeProps, metrics)) + assertThatThrownBy(() -> new SubmissionManager(null, deduplicationCache, config, metrics)) .isInstanceOf(NullPointerException.class); - assertThatThrownBy(() -> new SubmissionManager(platform, null, nodeProps, metrics)) + assertThatThrownBy(() -> new SubmissionManager(platform, null, config, metrics)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new SubmissionManager(platform, deduplicationCache, null, metrics)) .isInstanceOf(NullPointerException.class); - assertThatThrownBy(() -> new SubmissionManager(platform, deduplicationCache, nodeProps, null)) + assertThatThrownBy(() -> new SubmissionManager(platform, deduplicationCache, config, null)) .isInstanceOf(NullPointerException.class); } @@ -102,7 +117,7 @@ class SubmitTest extends AppTestBase { void setup() { bytes = randomBytes(25); when(mockedMetrics.getOrCreate(any())).thenReturn(platformTxnRejections); - submissionManager = new SubmissionManager(platform, deduplicationCache, nodeProps, mockedMetrics); + submissionManager = new SubmissionManager(platform, deduplicationCache, config, mockedMetrics); txBody = TransactionBody.newBuilder() .transactionID(TransactionID.newBuilder() .transactionValidStart(asTimestamp(Instant.now())) @@ -193,9 +208,16 @@ class UncheckedSubmitTest extends AppTestBase { @BeforeEach void setup() { - when(nodeProps.activeProfile()).thenReturn(Profile.TEST); + config = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(HederaConfig.class) + .withConfigDataType(StatsConfig.class) + .withConfigDataType(MetricsConfig.class) + .withValue("hedera.profiles.active", Profile.TEST.toString()) + .getOrCreateConfig(), + 1); when(mockedMetrics.getOrCreate(any())).thenReturn(platformTxnRejections); - submissionManager = new SubmissionManager(platform, deduplicationCache, nodeProps, mockedMetrics); + submissionManager = new SubmissionManager(platform, deduplicationCache, config, mockedMetrics); bytes = randomBytes(25); @@ -232,8 +254,15 @@ void testSuccessWithUncheckedSubmit() throws PreCheckException { @DisplayName("An unchecked transaction in PROD mode WILL FAIL") void testUncheckedSubmitInProdFails() { // Given we are in PROD mode - when(nodeProps.activeProfile()).thenReturn(Profile.PROD); - submissionManager = new SubmissionManager(platform, deduplicationCache, nodeProps, mockedMetrics); + config = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(HederaConfig.class) + .withConfigDataType(StatsConfig.class) + .withConfigDataType(MetricsConfig.class) + .withValue("hedera.profiles.active", Profile.PROD.toString()) + .getOrCreateConfig(), + 1); + submissionManager = new SubmissionManager(platform, deduplicationCache, config, mockedMetrics); // When we submit an unchecked transaction, and separate bytes, then the // submission FAILS because we are in PROD mode @@ -255,8 +284,15 @@ void 
testUncheckedSubmitInProdFails() { @DisplayName("Send bogus bytes as an unchecked transaction and verify it fails with a PreCheckException") void testBogusBytes() { // Given we are in TEST mode and have a transaction with bogus bytes - when(nodeProps.activeProfile()).thenReturn(Profile.TEST); - submissionManager = new SubmissionManager(platform, deduplicationCache, nodeProps, mockedMetrics); + config = () -> new VersionedConfigImpl( + HederaTestConfigBuilder.create(false) + .withConfigDataType(HederaConfig.class) + .withConfigDataType(StatsConfig.class) + .withConfigDataType(MetricsConfig.class) + .withValue("hedera.profiles.active", Profile.TEST.toString()) + .getOrCreateConfig(), + 1); + submissionManager = new SubmissionManager(platform, deduplicationCache, config, mockedMetrics); txBody = TransactionBody.newBuilder() .transactionID(TransactionID.newBuilder() .transactionValidStart(asTimestamp(Instant.now())) diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleResultTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleResultTest.java index baf654b164f7..eb30e6d44cb0 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleResultTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleResultTest.java @@ -19,48 +19,23 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_PAYER_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static com.hedera.hapi.node.base.ResponseCodeEnum.UNKNOWN; -import static com.hedera.node.app.workflows.prehandle.FakeSignatureVerificationFuture.badFuture; -import static com.hedera.node.app.workflows.prehandle.FakeSignatureVerificationFuture.goodFuture; import static com.hedera.node.app.workflows.prehandle.PreHandleResult.Status.NODE_DUE_DILIGENCE_FAILURE; import static com.hedera.node.app.workflows.prehandle.PreHandleResult.Status.PRE_HANDLE_FAILURE; import static com.hedera.node.app.workflows.prehandle.PreHandleResult.Status.SO_FAR_SO_GOOD; import static com.hedera.node.app.workflows.prehandle.PreHandleResult.Status.UNKNOWN_FAILURE; -import static java.util.Collections.emptyMap; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.junit.jupiter.api.Named.named; -import static org.junit.jupiter.params.provider.Arguments.of; -import static org.mockito.Mockito.mock; -import com.google.common.collect.Streams; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; -import com.hedera.hapi.node.base.KeyList; -import com.hedera.hapi.node.base.ThresholdKey; -import com.hedera.node.app.signature.SignatureVerificationFuture; -import com.hedera.node.app.signature.impl.SignatureVerificationImpl; import com.hedera.node.app.spi.fixtures.Scenarios; -import com.hedera.node.app.spi.signatures.SignatureVerification; import com.hedera.node.app.workflows.TransactionInfo; -import com.hedera.pbj.runtime.io.buffer.Bytes; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import java.util.Set; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; import 
org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @@ -83,14 +58,14 @@ final class CreationTests { @SuppressWarnings("ConstantConditions") void statusMustNotBeNull( @Mock AccountID payer, @Mock TransactionInfo txInfo, @Mock PreHandleResult innerResult) { - final Map verificationResults = Map.of(); assertThatThrownBy(() -> new PreHandleResult( payer, Key.DEFAULT, null, OK, txInfo, - verificationResults, + Set.of(), + Map.of(), innerResult, DEFAULT_CONFIG_VERSION)) .isInstanceOf(NullPointerException.class); @@ -102,14 +77,14 @@ void statusMustNotBeNull( @SuppressWarnings("ConstantConditions") void responseCodeMustNotBeNull( @Mock AccountID payer, @Mock TransactionInfo txInfo, @Mock PreHandleResult innerResult) { - final Map verificationResults = Map.of(); assertThatThrownBy(() -> new PreHandleResult( payer, Key.DEFAULT, SO_FAR_SO_GOOD, null, txInfo, - verificationResults, + Set.of(), + Map.of(), innerResult, DEFAULT_CONFIG_VERSION)) .isInstanceOf(NullPointerException.class); @@ -125,16 +100,8 @@ void unknownFailure() { assertThat(result.innerResult()).isNull(); assertThat(result.payer()).isNull(); assertThat(result.txInfo()).isNull(); + assertThat(result.requiredKeys()).isNull(); assertThat(result.verificationResults()).isNull(); - assertThat(result.verificationFor(ERIN.account().alias())) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - - assertThat(result.verificationFor(Key.DEFAULT)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); } @Test @@ -150,16 +117,8 @@ void nodeDiligenceFailure(@Mock TransactionInfo txInfo) { assertThat(result.innerResult()).isNull(); assertThat(result.payer()).isEqualTo(nodeAccountId); assertThat(result.txInfo()).isSameAs(txInfo); + assertThat(result.requiredKeys()).isNull(); assertThat(result.verificationResults()).isNull(); - assertThat(result.verificationFor(ERIN.account().alias())) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - - assertThat(result.verificationFor(Key.DEFAULT)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); } @Test @@ -167,1095 +126,15 @@ void nodeDiligenceFailure(@Mock TransactionInfo txInfo) { void preHandleFailure(@Mock TransactionInfo txInfo) { final var payer = AccountID.newBuilder().accountNum(1001).build(); final var responseCode = INVALID_PAYER_ACCOUNT_ID; - final var result = PreHandleResult.preHandleFailure(payer, null, responseCode, txInfo, null); + final var result = PreHandleResult.preHandleFailure(payer, null, responseCode, txInfo, null, null); assertThat(result.status()).isEqualTo(PRE_HANDLE_FAILURE); assertThat(result.responseCode()).isEqualTo(responseCode); assertThat(result.innerResult()).isNull(); assertThat(result.payer()).isEqualTo(payer); assertThat(result.txInfo()).isSameAs(txInfo); + assertThat(result.requiredKeys()).isNull(); assertThat(result.verificationResults()).isNull(); - assertThat(result.verificationFor(ERIN.account().alias())) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - - 
assertThat(result.verificationFor(Key.DEFAULT)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - } - - /** - * Tests to verify that finding a {@link SignatureVerification} for cryptographic keys (ED25519, ECDSA_SECP256K1) - * work as expected. No key lists or threshold keys involved. - */ - @Nested - @DisplayName("Finding SignatureVerification With Cryptographic Keys") - @ExtendWith(MockitoExtension.class) - final class FindingSignatureVerificationWithCryptoKeyTests { - @Test - @DisplayName("Null key or alias throws exception") - @SuppressWarnings("DataFlowIssue") - void nullKeyThrowsException() { - final var result = PreHandleResult.unknownFailure(); - assertThatThrownBy(() -> result.verificationFor((Key) null)).isInstanceOf(NullPointerException.class); - assertThatThrownBy(() -> result.verificationFor((Bytes) null)).isInstanceOf(NullPointerException.class); - } - - @ParameterizedTest - @MethodSource("provideCompoundKeys") - @DisplayName("If there are no verification results, then the result is failed") - void noVerificationResults(@NonNull final Key key) { - final var result = PreHandleResult.unknownFailure(); - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - @Test - @DisplayName("If the key is a cryptographic key in the results then it is returned") - void cryptoKeyIsPresent() { - final var aliceKey = ALICE.keyInfo().publicKey(); // ECDSA - final var aliceFuture = mock(SignatureVerificationFuture.class); - final var bobKey = BOB.keyInfo().publicKey(); // ED25519 - final var bobFuture = mock(SignatureVerificationFuture.class); - final var verificationResults = Map.of(aliceKey, aliceFuture, bobKey, bobFuture); - final var result = preHandle(verificationResults); - - assertThat(result.verificationFor(aliceKey)).isSameAs(aliceFuture); - assertThat(result.verificationFor(bobKey)).isSameAs(bobFuture); - } - - @Test - @DisplayName("If the key is a cryptographic key not in the results then null returned") - void cryptoKeyIsMissing() { - final var aliceKey = ALICE.keyInfo().publicKey(); // ECDSA - final var aliceFuture = mock(SignatureVerificationFuture.class); - final var bobKey = BOB.keyInfo().publicKey(); // ED25519 - final var bobFuture = mock(SignatureVerificationFuture.class); - final var verificationResults = Map.of(aliceKey, aliceFuture, bobKey, bobFuture); - final var result = preHandle(verificationResults); - - // ERIN is another ECDSA key, but one that is not in the verification results - assertThat(result.verificationFor(ERIN.keyInfo().publicKey())) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - /** A provider that supplies basic cryptographic keys */ - static Stream provideCompoundKeys() { - // FUTURE: Add RSA keys to this list - return Stream.of( - Arguments.of(named("ED25519", FAKE_ED25519_KEY_INFOS[0].publicKey())), - Arguments.of(named("ECDSA_SECP256K1", FAKE_ECDSA_KEY_INFOS[0].publicKey()))); - } - } - - /** - * Tests to verify that finding a {@link SignatureVerification} for compound keys (threshold keys, key lists) that - * also have duplicated keys. The point of these tests is really to verify that duplicate keys are counted multiple - * times as expected when meeting threshold requirements. - * - *

We try testing all the boundary conditions: - *
And for those testing "more than needed" and "less than needed", we try to get right on the boundary condition - * as well as all the other permutations. - */ - @Nested - @DisplayName("Finding SignatureVerification With Complex Keys with Duplicates") - @ExtendWith(MockitoExtension.class) - final class FindingSignatureVerificationWithDuplicateKeysTests { - // Used once in the key list - private static final Key ECDSA_X1 = FAKE_ECDSA_KEY_INFOS[1].publicKey(); - // Used twice in the key list - private static final Key ECDSA_X2 = FAKE_ECDSA_KEY_INFOS[2].publicKey(); - // Used once in the key list - private static final Key ED25519_X1 = FAKE_ED25519_KEY_INFOS[1].publicKey(); - // Used twice in the key list - private static final Key ED25519_X2 = FAKE_ED25519_KEY_INFOS[2].publicKey(); - - private Map verificationResults(Map keysAndPassFail) { - final var results = new HashMap(); - for (final var entry : keysAndPassFail.entrySet()) { - results.put( - entry.getKey(), - new FakeSignatureVerificationFuture( - new SignatureVerificationImpl(entry.getKey(), null, entry.getValue()))); - } - return results; - } - - @Test - @DisplayName("All signatures are valid for the KeyList") - void allValidInKeyList() { - // Given a KeyList with 6 different keys with 2 duplicates (4 unique keys) and - // verification results for ALL 4 different keys that are PASSING - final var keyList = KeyList.newBuilder() - .keys(ECDSA_X2, ECDSA_X2, ECDSA_X1, ED25519_X2, ED25519_X2, ED25519_X1) - .build(); - var key = Key.newBuilder().keyList(keyList).build(); - var verificationResults = verificationResults(Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X1, true, - ED25519_X2, true)); - // When we pre handle - var result = preHandle(verificationResults); - // Then we find the verification results are passing because we have all keys signed - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - /** - * If there are just enough signatures to meet the threshold and all are valid signatures, then the overall - * verification will pass. 
- */ - @ParameterizedTest - @MethodSource("provideJustEnoughSignaturesAndAllAreValid") - @DisplayName("Just enough signatures and all are valid") - void justEnoughAndAllAreValid(@NonNull final Map keysAndPassFail) { - // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and - // verification results for only 2 keys (1 that is a duplicate, one that is not), so that the threshold is - // met - final var keyList = KeyList.newBuilder() - .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) - .build(); - final var thresholdKey = - ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); - final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); - final var verificationResults = verificationResults(keysAndPassFail); - // When we pre handle - final var result = preHandle(verificationResults); - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - static Stream provideJustEnoughSignaturesAndAllAreValid() { - return Stream.of( - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true))), - Arguments.of(named( - "ECDSA_X2=pass, ED25519_X1=pass", - Map.of( - ECDSA_X2, true, - ED25519_X1, true))), - Arguments.of(named( - "ECDSA_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ED25519_X2, true))), - Arguments.of(named( - "ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ED25519_X1, true, - ED25519_X2, true)))); - } - - /** - * If there are more than enough signatures, but only *just barely* enough signatures are valid that the - * threshold is met, then the verification will still pass. 
- */ - @ParameterizedTest - @MethodSource("provideMoreThanEnoughAndJustEnoughValid") - @DisplayName("More than enough signatures but only a sufficient number are valid") - void moreThanEnoughAndJustEnoughValid(@NonNull final Map keysAndPassFail) { - // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and - // verification results for 3 keys (1 that is a duplicate, two that are not), but only 2 of the three are - // passing (where one of them is the duplicate), so that the threshold is met - final var keyList = KeyList.newBuilder() - .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) - .build(); - final var thresholdKey = - ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); - final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); - final var verificationResults = verificationResults(keysAndPassFail); - // When we pre handle - final var result = preHandle(verificationResults); - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - static Stream provideMoreThanEnoughAndJustEnoughValid() { - return Stream.of( - // Every key answers, but just enough are valid to pass - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X1, false, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, false, - ECDSA_X2, true, - ED25519_X1, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X1, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, false, - ECDSA_X2, false, - ED25519_X1, true, - ED25519_X2, true))), - // Some keys don't answer, but just enough are valid to pass - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X1, false))), - Arguments.of(named( - "ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=fail", - Map.of( - ECDSA_X2, true, - ED25519_X1, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=pass, ED25519_X1=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ED25519_X1, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=fail, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, false, - ED25519_X1, true, - ED25519_X2, true))), - // Some other keys don't answer, but just enough are valid to pass - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=pass", - Map.of( - ECDSA_X1, false, - ECDSA_X2, true, - ED25519_X1, true))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X2, false, - ED25519_X1, true, - ED25519_X2, true)))); - } - - /** - * More than enough signatures were provided, and more than were needed actually passed. The overall - * verification therefore also passes. 
- */ - @ParameterizedTest - @MethodSource("provideMoreThanEnoughAndMoreThanNeededAreValid") - @DisplayName("More than enough signatures and more than enough are valid") - void moreThanEnoughAndMoreThanNeededAreValid(@NonNull final Map keysAndPassFail) { - // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and - // verification results for 3 keys (1 that is a duplicate, two that are not), and all three are passing, - // so that the threshold is met, plus more! - final var keyList = KeyList.newBuilder() - .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) - .build(); - final var thresholdKey = - ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); - final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); - final var verificationResults = verificationResults(keysAndPassFail); - // When we pre handle - final var result = preHandle(verificationResults); - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - static Stream provideMoreThanEnoughAndMoreThanNeededAreValid() { - return Stream.of( - // Every key answers, and all are valid - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X1, true, - ED25519_X2, true))), - - // Every key answers, one or more is invalid, but still more than we need - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X1, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X1, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, false, - ECDSA_X2, true, - ED25519_X1, true, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, false, - ECDSA_X2, true, - ED25519_X1, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X1, true, - ED25519_X2, true))), - - // Some keys don't answer, but all are valid (more than enough) - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X1=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X1, true))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, true, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X2=pass, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X2, true, - ED25519_X1, true, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X2=pass, ED25519_X2=pass", - Map.of( - ECDSA_X2, true, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=pass, ED25519_X1=pass, ED25519_X2=pass", - Map.of( - ECDSA_X1, true, - ED25519_X1, true, - ED25519_X2, true)))); - } - - /** - * In this test there are more than enough keys in the signature ot meet the threshold, if they all passed. - * But it turns out, that enough of them did NOT pass, that the threshold is not met, and the overall - * verification is therefore failed. 
- */ - @ParameterizedTest - @MethodSource("provideMoreThanEnoughButNotEnoughValid") - @DisplayName("More than enough signatures but not enough are valid") - void moreThanEnoughButNotEnoughValid(@NonNull final Map keysAndPassFail) { - // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and - // verification results for 3 keys (1 that is a duplicate, two that are not), and only the two non-duplicate - // keys are passing, so the threshold is NOT met. - final var keyList = KeyList.newBuilder() - .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) - .build(); - final var thresholdKey = - ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); - final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); - final var verificationResults = verificationResults(keysAndPassFail); - // When we pre handle - final var result = preHandle(verificationResults); - // Then we find the verification results are NOT passing because we have NOT met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); } - - static Stream provideMoreThanEnoughButNotEnoughValid() { - return Stream.of( - // Every key answers, but not enough are valid - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X1, false, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=fail", - Map.of( - ECDSA_X1, false, - ECDSA_X2, true, - ED25519_X1, false, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, false, - ECDSA_X2, false, - ED25519_X1, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, false, - ECDSA_X2, false, - ED25519_X1, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X1, true, - ED25519_X2, false))), - - // Some keys don't answer, and those that do don't cross the threshold - Arguments.of(named( - "ECDSA_X2=pass, ED25519_X1=fail, ED25519_X2=fail", - Map.of( - ECDSA_X2, true, - ED25519_X1, false, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=pass, ED25519_X1=fail, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ED25519_X1, false, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ED25519_X1=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, false, - ED25519_X1, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=fail, ED25519_X1=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, false, - ED25519_X1, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=pass, ED25519_X1=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ED25519_X1, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X1=fail", - Map.of( - ECDSA_X1, false, - ECDSA_X2, true, - ED25519_X1, false))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X1, false))), - Arguments.of(named( - "ECDSA_X2=fail, ED25519_X1=fail, ED25519_X2=pass", - Map.of( - ECDSA_X2, false, - ED25519_X1, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X2=fail, ED25519_X1=pass, 
ED25519_X2=fail", - Map.of( - ECDSA_X2, false, - ED25519_X1, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X1=pass", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X1, true))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=pass, ED25519_X2=fail", - Map.of( - ECDSA_X1, false, - ECDSA_X2, true, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=pass, ECDSA_X2=fail, ED25519_X2=fail", - Map.of( - ECDSA_X1, true, - ECDSA_X2, false, - ED25519_X2, false))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X2=pass", - Map.of( - ECDSA_X1, false, - ECDSA_X2, false, - ED25519_X2, true))), - Arguments.of(named( - "ECDSA_X1=fail, ECDSA_X2=fail, ED25519_X1=pass", - Map.of( - ECDSA_X1, false, - ECDSA_X2, false, - ED25519_X1, true)))); - } - - /** - * In this test, every signature is valid, but there just are not enough signatures to meet the threshold, - * so the overall verification must fail. - */ - @ParameterizedTest - @MethodSource("provideNotEnoughSignatures") - @DisplayName("Not enough signatures but all are valid") - void notEnoughSignatures(@NonNull final Map keysAndPassFail) { - // Given a ThresholdList with a threshold of 3 and 6 different keys with 2 duplicates (4 unique keys) and - // there are only verification results for 1 key, which isn't enough to meet the threshold. - final var keyList = KeyList.newBuilder() - .keys(ECDSA_X1, ECDSA_X2, ECDSA_X2, ED25519_X1, ED25519_X2, ED25519_X2) - .build(); - final var thresholdKey = - ThresholdKey.newBuilder().threshold(3).keys(keyList).build(); - final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); - final var verificationResults = verificationResults(keysAndPassFail); - // When we pre handle - final var result = preHandle(verificationResults); - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - static Stream provideNotEnoughSignatures() { - return Stream.of( - // Every key answers, but not enough are valid - Arguments.of(named("ECDSA_X1=pass", Map.of(ECDSA_X1, true))), // 1 of 3 - Arguments.of(named("ECDSA_X2=pass", Map.of(ECDSA_X2, true))), // 2 of 3 - Arguments.of(named("ED25519_X1=pass", Map.of(ED25519_X1, true))), // 1 of 3 - Arguments.of(named("ED25519_X2=pass", Map.of(ED25519_X2, true))), // 2 of 3 - Arguments.of(named( - "ECDSA_X1=pass, ED25519_X1=pass", Map.of(ECDSA_X1, true, ED25519_X1, true)))); // 2 of 3 - } - } - - /** - * Various targeted tests for {@link ThresholdKey} and {@link KeyList} lookup. 
- */ - @Nested - @DisplayName("Finding SignatureVerification With Threshold and KeyList Keys") - @ExtendWith(MockitoExtension.class) - final class FindingSignatureVerificationWithCompoundKeyTests { - - // A ThresholdKey with a threshold greater than max keys acts like a KeyList - - @Test - @DisplayName("An empty KeyList never validates") - void emptyKeyList() { - // Given a KeyList with no keys - final var keyList = KeyList.newBuilder().build(); - final var key = Key.newBuilder().keyList(keyList).build(); - // When we pre handle - final var result = preHandle(emptyMap()); - // Then we find the verification results will fail - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - @ParameterizedTest - @ValueSource(ints = {-1, 0}) - @DisplayName("A threshold of less than 1 is clamped to 1") - void thresholdLessThanOne(final int threshold) { - // Given a ThresholdKey with a threshold less than 1 - final var thresholdKey = ThresholdKey.newBuilder() - .threshold(threshold) - .keys(KeyList.newBuilder() - .keys(FAKE_ECDSA_KEY_INFOS[0].publicKey(), FAKE_ED25519_KEY_INFOS[0].publicKey())) - .build(); - final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); - - // First, verify that if there are NO valid verification results the threshold verification fails - Map verificationResults = - Map.of(FAKE_ECDSA_KEY_INFOS[1].publicKey(), goodFuture(FAKE_ECDSA_KEY_INFOS[1].publicKey())); - var result = preHandle(verificationResults); - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - - // Now verify that if we verify with one valid verification result, the threshold verification passes - verificationResults = - Map.of(FAKE_ECDSA_KEY_INFOS[0].publicKey(), goodFuture(FAKE_ECDSA_KEY_INFOS[0].publicKey())); - // When we pre handle - result = preHandle(verificationResults); - // Then we find the verification results will pass if we have at least 1 valid signature - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - @Test - @DisplayName("A threshold greater than the number of keys is clamped to the number of keys") - void thresholdGreaterThanNumKeys() { - // Given a ThresholdKey with a threshold greater than the number of keys - final var thresholdKey = ThresholdKey.newBuilder() - .threshold(3) - .keys(KeyList.newBuilder() - .keys(FAKE_ECDSA_KEY_INFOS[0].publicKey(), FAKE_ED25519_KEY_INFOS[0].publicKey())) - .build(); - final var key = Key.newBuilder().thresholdKey(thresholdKey).build(); - final Map verificationResults = Map.of( - FAKE_ECDSA_KEY_INFOS[0].publicKey(), goodFuture(FAKE_ECDSA_KEY_INFOS[0].publicKey()), - FAKE_ED25519_KEY_INFOS[0].publicKey(), goodFuture(FAKE_ED25519_KEY_INFOS[0].publicKey())); - - // When we pre handle - var result = preHandle(verificationResults); - - // Then we find the verification results will pass - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - /** - * If there are no verification results at all, then no matter what key we throw at it, we should get back - * a failed verification. 
- */ - @ParameterizedTest - @MethodSource("provideCompoundKeys") - @DisplayName("A ThresholdKey or KeyList with no verification results returns a failed SignatureVerification") - void keyWithNoVerificationResults(@NonNull final Key key) { - final var result = preHandle(emptyMap()); - final var future = result.verificationFor(key); - assertThat(future).isNotNull(); - assertThat(future.isDone()).isTrue(); - assertThat(future) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - /** - * If there are just enough signatures to meet the threshold and all are valid signatures, then the overall - * verification will pass. - */ - @ParameterizedTest - @MethodSource("provideCompoundKeys") - @DisplayName("Just enough signatures and all are valid") - void justEnoughAndAllAreValid(@NonNull final Key key) { - // Given a barely sufficient number of signatures, all of which are valid - final var verificationResults = allVerifications(key); - removeVerificationsFrom(key, verificationResults, false); - - // When we pre handle - final var result = preHandle(verificationResults); - - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - /** - * If there are more than enough signatures, but only *just barely* enough signatures are valid that the - * threshold is met, then the verification will still pass. - */ - @ParameterizedTest - @MethodSource("provideCompoundKeys") - @DisplayName("More than enough signatures but only a sufficient number are valid") - void moreThanEnoughAndJustEnoughValid(@NonNull final Key key) { - // Given more than enough validations but just barely enough of them are valid - final var verificationResults = allVerifications(key); - failVerificationsIn(key, verificationResults, false); - - // When we pre handle - final var result = preHandle(verificationResults); - - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - /** - * More than enough signatures were provided, and more than were needed actually passed. The overall - * verification therefore also passes. - */ - @ParameterizedTest - @MethodSource("provideCompoundKeys") - @DisplayName("More than enough signatures and more than enough are valid") - void moreThanEnoughAndMoreThanNeededAreValid(@NonNull final Key key) { - // Given more than enough validations but just barely enough of them are valid - final Map verificationResults = allVerifications(key); - - // When we pre handle - final var result = preHandle(verificationResults); - - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(true); - } - - /** - * In this test there are more than enough keys in the signature ot meet the threshold, if they all passed. - * But it turns out, that enough of them did NOT pass, that the threshold is not met, and the overall - * verification is therefore failed. 
- */ - @ParameterizedTest - @MethodSource("provideCompoundKeys") - @DisplayName("More than enough signatures but not enough are valid") - void moreThanEnoughButNotEnoughValid(@NonNull final Key key) { - // Given more than enough validations but not enough of them are valid - final var verificationResults = allVerifications(key); - failVerificationsIn(key, verificationResults, true); - - // When we pre handle - final var result = preHandle(verificationResults); - - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - /** - * In this test, every signature is valid, but there just are not enough signatures to meet the threshold, - * so the overall verification must fail. - */ - @ParameterizedTest - @MethodSource("provideCompoundKeys") - @DisplayName("Not enough signatures but all are valid") - void notEnoughSignatures(@NonNull final Key key) { - // Given not enough signatures - final var verificationResults = allVerifications(key); - removeVerificationsFrom(key, verificationResults, true); - - // When we pre handle - final var result = preHandle(verificationResults); - - // Then we find the verification results are passing because we have met the minimum threshold - assertThat(result.verificationFor(key)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - /** A provider that supplies a mixture of KeyLists and ThresholdKeys, all of which are good keys. */ - static Stream provideCompoundKeys() { - // FUTURE: Add RSA keys to this list - return Streams.concat(provideKeyLists(), provideThresholdKeys()); - } - - /** - * Provides a comprehensive set of KeyLists, including with nesting of KeyLists and ThresholdKeys. At most, we - * return a KeyList with a depth of 3 and with up to 4 elements, one for each type of key that we support. This - * provider does not create duplicates, those scenarios are tested separately. - */ - static Stream provideKeyLists() { - return keyListPermutations().entrySet().stream() - .map(entry -> of(named( - "KeyList(" + entry.getKey() + ")", - Key.newBuilder().keyList(entry.getValue()).build()))); - } - - /** - * A provider specifically for all permutations of a valid threshold key, including those with duplicate keys - * and nesting. - */ - static Stream provideThresholdKeys() { - return keyListPermutations().entrySet().stream().map(entry -> { - final var keys = entry.getValue().keysOrThrow(); - final var threshold = Math.max(1, keys.size() / 2); - final var thresholdKey = Key.newBuilder() - .thresholdKey(ThresholdKey.newBuilder() - .threshold(threshold) - .keys(KeyList.newBuilder().keys(keys))) - .build(); - return of(named("ThresholdKey(" + threshold + ", " + entry.getKey() + ")", thresholdKey)); - }); - } - - /** Generates the set of test permutations shared between KeyLists and ThresholdKeys. 
*/ - private static Map keyListPermutations() { - final var map = new LinkedHashMap(); - // FUTURE: Add RSA keys to this list - final List>> creators = List.of( - (i) -> Map.entry("ED25519", FAKE_ED25519_KEY_INFOS[i].publicKey()), - (i) -> Map.entry("ECDSA_SECP256K1", FAKE_ECDSA_KEY_INFOS[i].publicKey()), - (i) -> Map.entry( - "KeyList(ECDSA_SECP256K1, ED25519)", - keyList(FAKE_ECDSA_KEY_INFOS[i].publicKey(), FAKE_ED25519_KEY_INFOS[i].publicKey())), - (i) -> Map.entry( - "ThresholdKey(1, ED25519, ECDSA_SECP256K1)", - thresholdKey( - 1, FAKE_ED25519_KEY_INFOS[i].publicKey(), FAKE_ECDSA_KEY_INFOS[i].publicKey()))); - - // Compute every permutation of 1, 2, 3, and 4 elements. - for (int i = -1; i < 4; i++) { - for (int j = -1; j < 4; j++) { - for (int k = -1; k < 4; k++) { - for (int el = 0; el < 4; el++) { - int keyIndex = 0; - final var names = new ArrayList(); - final var keys = new ArrayList(); - if (i >= 0) { - final var entry = creators.get(i).apply(keyIndex++); - final var name = entry.getKey(); - final var key = entry.getValue(); - names.add(name); - keys.add(key); - } - if (j >= 0) { - final var entry = creators.get(j).apply(keyIndex++); - final var name = entry.getKey(); - final var key = entry.getValue(); - names.add(name); - keys.add(key); - } - if (k >= 0) { - final var entry = creators.get(k).apply(keyIndex++); - final var name = entry.getKey(); - final var key = entry.getValue(); - names.add(name); - keys.add(key); - } - final var entry = creators.get(el).apply(keyIndex); - final var name = entry.getKey(); - final var key = entry.getValue(); - names.add(name); - keys.add(key); - - final var keyList = KeyList.newBuilder().keys(keys).build(); - map.put(String.join(", ", names), keyList); - } - } - } - } - return map; - } - - /** Provides all {@link SignatureVerificationFuture}s for every cryptographic key in the {@link Key}. */ - private static Map allVerifications(@NonNull final Key key) { - return switch (key.key().kind()) { - case KEY_LIST -> allVerifications(key.keyListOrThrow()); - case THRESHOLD_KEY -> allVerifications(key.thresholdKeyOrThrow().keysOrThrow()); - case ED25519, ECDSA_SECP256K1 -> new HashMap<>(Map.of(key, goodFuture(key))); // make mutable - default -> throw new IllegalArgumentException( - "Unsupported key type: " + key.key().kind()); - }; - } - - /** Creates a {@link SignatureVerification} for each key in the key list */ - private static Map allVerifications(@NonNull final KeyList key) { - return key.keysOrThrow().stream() - .map(FindingSignatureVerificationWithCompoundKeyTests::allVerifications) - .flatMap(map -> map.entrySet().stream()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - /** - * Removes some number of {@link SignatureVerificationFuture}s from the map such that either there are only - * just barely enough remaining to pass any threshold checks (if {@code removeTooMany} is {@code false}), or too - * many of them such that there are not enough for threshold checks to pass (if {@code removeToMany} is - * {@code true}). - */ - private static void removeVerificationsFrom( - @NonNull final Key key, - @NonNull final Map map, - final boolean removeTooMany) { - - switch (key.key().kind()) { - case KEY_LIST -> { - // A Key list cannot have ANY removed and still pass. So we only remove a single key's worth of - // verifications if we are removing too many. 
- if (removeTooMany) { - final var subKeys = key.keyListOrThrow().keysOrThrow(); - final var subKey = subKeys.get(0); - removeVerificationsFrom(subKey, map, true); - } - } - case THRESHOLD_KEY -> { - // We remove verifications associated with keys. If we are removing too many, we remove one more - // than is supported by the threshold. Otherwise, we just remove down to the threshold - final var threshold = key.thresholdKeyOrThrow().threshold(); - final var subKeys = key.thresholdKeyOrThrow().keysOrThrow().keysOrThrow(); - final var numToRemove = subKeys.size() - threshold + (removeTooMany ? 1 : 0); - for (int i = 0; i < numToRemove; i++) { - final var subKey = subKeys.get(i); - removeVerificationsFrom(subKey, map, removeTooMany); - } - } - case ED25519, ECDSA_SECP256K1 -> { - if (removeTooMany) { - map.remove(key); - } - } - default -> throw new IllegalArgumentException( - "Unsupported key type: " + key.key().kind()); - } - } - - /** Similar to the above, except we fail verifications instead of removing them. */ - private static void failVerificationsIn( - @NonNull final Key key, @NonNull Map map, boolean failTooMany) { - switch (key.key().kind()) { - case KEY_LIST -> { - // A Key list cannot have ANY failed and still pass. So we only fail a single key's worth of - // verifications if we are failing too many. - if (failTooMany) { - final var subKeys = key.keyListOrThrow().keysOrThrow(); - final var subKey = subKeys.get(0); - failVerificationsIn(subKey, map, true); - } - } - case THRESHOLD_KEY -> { - // We fail verifications associated with keys. If we are failing too many, we fail one more - // than is supported by the threshold. Otherwise, we just fail down to the threshold - final var threshold = key.thresholdKeyOrThrow().threshold(); - final var subKeys = key.thresholdKeyOrThrow().keysOrThrow().keysOrThrow(); - final var numToFail = subKeys.size() - threshold + (failTooMany ? 1 : 0); - for (int i = 0; i < numToFail; i++) { - final var subKey = subKeys.get(i); - failVerificationsIn(subKey, map, failTooMany); - } - } - case ED25519, ECDSA_SECP256K1 -> { - if (failTooMany) { - map.put(key, badFuture(key)); - } - } - default -> throw new IllegalArgumentException( - "Unsupported key type: " + key.key().kind()); - } - } - } - - @Nested - @DisplayName("Hollow Account based Verification") - final class HollowAccountBasedTest { - /** As with key verification, with hollow account verification, an empty list of signatures should fail. 
*/ - @Test - @DisplayName("Cannot verify hollow account when the signature list is empty") - void failToVerifyIfSignaturesAreEmpty() { - // Given a hollow account and no verification results - final var alias = ERIN.account().alias(); - // When we pre-handle the transaction - final var result = preHandle(emptyMap()); - // Then we find the verification result is failed - assertThat(result.verificationFor(alias)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - /** If there are verifications but none for this hollow account, then we get no result */ - @Test - @DisplayName("Cannot verify hollow account if it is not in the verification results") - void failToVerifyIfHollowAccountIsNotInVerificationResults() { - // Given a hollow account and no verification results - final var alias = ERIN.account().alias(); - Map verificationResults = Map.of( - ALICE.keyInfo().publicKey(), goodFuture(ALICE.keyInfo().publicKey()), - BOB.keyInfo().publicKey(), goodFuture(BOB.keyInfo().publicKey()), - CAROL.keyInfo().publicKey(), goodFuture(CAROL.keyInfo().publicKey(), CAROL.account())); - // When we pre-handle the transaction - final var result = preHandle(verificationResults); - // Then we find the verification result is failed - assertThat(result.verificationFor(alias)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(false); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - @DisplayName("Able to verify if the hollow account is in the verification results") - void failToVerifyIfHollowAccountIsNotInVerificationResults(final boolean passes) { - // Given a hollow account and no verification results - final var alias = ERIN.account().alias(); - Map verificationResults = Map.of( - ALICE.keyInfo().publicKey(), goodFuture(ALICE.keyInfo().publicKey()), - BOB.keyInfo().publicKey(), goodFuture(BOB.keyInfo().publicKey()), - CAROL.keyInfo().publicKey(), goodFuture(CAROL.keyInfo().publicKey(), CAROL.account()), - ERIN.keyInfo().publicKey(), - passes - ? goodFuture(ERIN.keyInfo().publicKey(), ERIN.account()) - : badFuture(ERIN.keyInfo().publicKey(), ERIN.account())); - // When we pre-handle the transaction - final var result = preHandle(verificationResults); - // Then we find the verification result is as expected - assertThat(result.verificationFor(alias)) - .succeedsWithin(1, TimeUnit.SECONDS) - .extracting(SignatureVerification::passed) - .isEqualTo(passes); - } - } - - /** A simple utility method for creating a "SO_FAR_SO_GOOD" PreHandleResult */ - private PreHandleResult preHandle(@NonNull final Map map) { - return new PreHandleResult( - ALICE.accountID(), ALICE.account().key(), SO_FAR_SO_GOOD, OK, null, map, null, DEFAULT_CONFIG_VERSION); - } - - /** Convenience method for creating a key list */ - private static Key keyList(Key... keys) { - return Key.newBuilder().keyList(KeyList.newBuilder().keys(keys)).build(); - } - - /** Convenience method for creating a threshold key */ - private static Key thresholdKey(int threshold, Key... 
keys) { - return Key.newBuilder() - .thresholdKey(ThresholdKey.newBuilder() - .keys(KeyList.newBuilder().keys(keys)) - .threshold(threshold)) - .build(); } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImplTest.java index 2423a244b552..527df6277487 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImplTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImplTest.java @@ -50,7 +50,9 @@ import com.hedera.node.app.workflows.TransactionChecker; import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; import com.hedera.node.app.workflows.dispatcher.TransactionDispatcher; +import com.hedera.node.app.workflows.handle.HandleContextVerifier; import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.HederaConfig; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.swirlds.common.system.transaction.Transaction; @@ -59,7 +61,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; @@ -130,7 +131,7 @@ void setUp() { Collections.emptyMap())); storeFactory = new ReadableStoreFactory(fakeHederaState); - final var config = new VersionedConfigImpl(HederaTestConfigBuilder.createConfig(false), DEFAULT_CONFIG_VERSION); + final var config = new VersionedConfigImpl(HederaTestConfigBuilder.createConfig(), DEFAULT_CONFIG_VERSION); when(configProvider.getConfiguration()).thenReturn(config); workflow = new PreHandleWorkflowImpl( @@ -370,9 +371,9 @@ void payerSignatureInvalid(@Mock SignatureVerificationFuture sigFuture) throws E // But when we check the future for the signature, we find it will end up failing. 
// (And the handle workflow will deal with this) - final var future = result1.verificationFor(key); - assertThat(future).isNotNull(); - final var result = future.get(1, TimeUnit.MILLISECONDS); + final var config = configProvider.getConfiguration().getConfigData(HederaConfig.class); + final HandleContextVerifier verifier = new HandleContextVerifier(config, result1.verificationResults()); + final var result = verifier.verificationFor(key); assertThat(result.passed()).isFalse(); // And we do see this transaction registered with the deduplication cache verify(deduplicationCache).add(txInfo.txBody().transactionIDOrThrow()); @@ -480,14 +481,12 @@ void nonPayerSignatureInvalid( assertThat(result.responseCode()).isEqualTo(OK); assertThat(result.payer()).isEqualTo(payerAccount); // and the payer sig check succeeds - final var payerFuture = result.verificationFor(payerKey); - assertThat(payerFuture).isNotNull(); - final var payerFutureResult = payerFuture.get(1, TimeUnit.MILLISECONDS); + final var config = configProvider.getConfiguration().getConfigData(HederaConfig.class); + final HandleContextVerifier verifier = new HandleContextVerifier(config, result.verificationResults()); + final var payerFutureResult = verifier.verificationFor(payerKey); assertThat(payerFutureResult.passed()).isTrue(); // but the other checks fail - final var nonPayerFuture = result.verificationFor(badKey); - assertThat(nonPayerFuture).isNotNull(); - final var nonPayerFutureResult = nonPayerFuture.get(1, TimeUnit.MILLISECONDS); + final var nonPayerFutureResult = verifier.verificationFor(badKey); assertThat(nonPayerFutureResult.passed()).isFalse(); // And we do see this transaction registered with the deduplication cache verify(deduplicationCache).add(txInfo.txBody().transactionIDOrThrow()); @@ -523,9 +522,9 @@ void happyPath(@Mock SignatureVerificationFuture sigFuture) throws Exception { assertThat(result.status()).isEqualTo(SO_FAR_SO_GOOD); assertThat(result.responseCode()).isEqualTo(OK); assertThat(result.payer()).isEqualTo(ALICE.accountID()); - final var payerFuture = result.verificationFor(payerKey); - assertThat(payerFuture).isNotNull(); - final var payerFutureResult = payerFuture.get(1, TimeUnit.MILLISECONDS); + final var config = configProvider.getConfiguration().getConfigData(HederaConfig.class); + final HandleContextVerifier verifier = new HandleContextVerifier(config, result.verificationResults()); + final var payerFutureResult = verifier.verificationFor(payerKey); assertThat(payerFutureResult.passed()).isTrue(); assertThat(result.txInfo()).isNotNull(); assertThat(result.txInfo()).isSameAs(txInfo); @@ -558,9 +557,9 @@ void happyPathHollowAccountAsPayer(@Mock SignatureVerificationFuture sigFuture) assertThat(result.status()).isEqualTo(SO_FAR_SO_GOOD); assertThat(result.responseCode()).isEqualTo(OK); assertThat(result.payer()).isEqualTo(hollowAccountID); - final var payerFuture = result.verificationFor(hollowAccountAlias); - assertThat(payerFuture).isNotNull(); - final var payerFutureResult = payerFuture.get(1, TimeUnit.MILLISECONDS); + final var config = configProvider.getConfiguration().getConfigData(HederaConfig.class); + final HandleContextVerifier verifier = new HandleContextVerifier(config, result.verificationResults()); + final var payerFutureResult = verifier.verificationFor(hollowAccountAlias); assertThat(payerFutureResult.passed()).isTrue(); assertThat(payerFutureResult.evmAlias()).isEqualTo(hollowAccountAlias); assertThat(payerFutureResult.key()).isEqualTo(finalizedKey); @@ -609,14 +608,12 @@ void 
happyPathHollowAccountsNonPayer( assertThat(result.responseCode()).isEqualTo(OK); assertThat(result.payer()).isEqualTo(payerAccountID); // and the payer sig check succeeds - final var payerFuture = result.verificationFor(payerKey); - assertThat(payerFuture).isNotNull(); - final var payerFutureResult = payerFuture.get(1, TimeUnit.MILLISECONDS); + final var config = configProvider.getConfiguration().getConfigData(HederaConfig.class); + final HandleContextVerifier verifier = new HandleContextVerifier(config, result.verificationResults()); + final var payerFutureResult = verifier.verificationFor(payerKey); assertThat(payerFutureResult.passed()).isTrue(); // and the non-payer sig check for the hollow account works - final var nonPayerHollowFuture = result.verificationFor(hollowAccountAlias); - assertThat(nonPayerHollowFuture).isNotNull(); - final var nonPayerResult = nonPayerHollowFuture.get(1, TimeUnit.MILLISECONDS); + final var nonPayerResult = verifier.verificationFor(hollowAccountAlias); assertThat(nonPayerResult.evmAlias()).isEqualTo(hollowAccountAlias); assertThat(nonPayerResult.key()).isEqualTo(finalizedKey); assertThat(result.txInfo()).isNotNull(); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryCheckerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryCheckerTest.java index 7ccc88aa2a3a..2bc8b8e77414 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryCheckerTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryCheckerTest.java @@ -44,7 +44,6 @@ import com.hedera.node.app.fees.QueryFeeCheck; import com.hedera.node.app.service.token.impl.handlers.CryptoTransferHandler; import com.hedera.node.app.solvency.SolvencyPreCheck; -import com.hedera.node.app.spi.numbers.HederaAccountNumbers; import com.hedera.node.app.spi.workflows.InsufficientBalanceException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.workflows.TransactionInfo; @@ -59,9 +58,6 @@ @ExtendWith(MockitoExtension.class) class QueryCheckerTest { - @Mock - private HederaAccountNumbers accountNumbers; - @Mock(strictness = LENIENT) private QueryFeeCheck queryFeeCheck; @@ -78,22 +74,19 @@ class QueryCheckerTest { @BeforeEach void setup() { - checker = new QueryChecker(accountNumbers, queryFeeCheck, authorizer, cryptoTransferHandler, solvencyPreCheck); + checker = new QueryChecker(queryFeeCheck, authorizer, cryptoTransferHandler, solvencyPreCheck); } @SuppressWarnings("ConstantConditions") @Test void testConstructorWithIllegalArguments() { - assertThatThrownBy(() -> - new QueryChecker(null, queryFeeCheck, authorizer, cryptoTransferHandler, solvencyPreCheck)) + assertThatThrownBy(() -> new QueryChecker(null, authorizer, cryptoTransferHandler, solvencyPreCheck)) .isInstanceOf(NullPointerException.class); - assertThatThrownBy(() -> - new QueryChecker(accountNumbers, null, authorizer, cryptoTransferHandler, solvencyPreCheck)) + assertThatThrownBy(() -> new QueryChecker(queryFeeCheck, null, cryptoTransferHandler, solvencyPreCheck)) .isInstanceOf(NullPointerException.class); - assertThatThrownBy(() -> - new QueryChecker(accountNumbers, queryFeeCheck, null, cryptoTransferHandler, solvencyPreCheck)) + assertThatThrownBy(() -> new QueryChecker(queryFeeCheck, authorizer, null, solvencyPreCheck)) .isInstanceOf(NullPointerException.class); - assertThatThrownBy(() -> new QueryChecker(accountNumbers, queryFeeCheck, authorizer, null, solvencyPreCheck)) 
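Aside (not part of the patch): the test hunks above all converge on the same verification pattern. A minimal sketch of that pattern, using only names visible in this diff (the enclosing test class, its mocks, and the surrounding assertions are assumed):

    final var config = configProvider.getConfiguration().getConfigData(HederaConfig.class);
    final var verifier = new HandleContextVerifier(config, result.verificationResults());
    // verificationFor(...) now returns the SignatureVerification directly instead of a Future,
    // so the test no longer needs to block with get(1, TimeUnit.MILLISECONDS).
    final var verification = verifier.verificationFor(payerKey);
    assertThat(verification.passed()).isTrue();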
+ assertThatThrownBy(() -> new QueryChecker(queryFeeCheck, authorizer, cryptoTransferHandler, null)) .isInstanceOf(NullPointerException.class); } @@ -304,7 +297,7 @@ void testValidateAccountBalancesWithSuperuserAndFailingNodePayment() throws Insu final var transaction = Transaction.newBuilder().build(); final var transactionInfo = new TransactionInfo(transaction, txBody, signatureMap, Bytes.EMPTY, CONSENSUS_CREATE_TOPIC); - when(accountNumbers.isSuperuser(4711L)).thenReturn(true); + when(authorizer.isSuperUser(payer)).thenReturn(true); doThrow(new InsufficientBalanceException(INSUFFICIENT_TX_FEE, fee)) .when(queryFeeCheck) .nodePaymentValidity(List.of(accountAmount), fee, nodeAccountId); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryComponentTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryComponentTest.java deleted file mode 100644 index f336445579fa..000000000000 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryComponentTest.java +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright (C) 2020-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.workflows.query; - -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.hedera.hapi.node.base.AccountID; -import com.hedera.node.app.DaggerHederaInjectionComponent; -import com.hedera.node.app.HederaInjectionComponent; -import com.hedera.node.app.components.QueryInjectionComponent; -import com.hedera.node.app.service.mono.context.properties.BootstrapProperties; -import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; -import com.hedera.node.app.spi.state.ReadableStates; -import com.hedera.node.app.spi.state.WritableKVState; -import com.hedera.node.app.spi.state.WritableQueueState; -import com.hedera.node.app.spi.state.WritableSingletonState; -import com.hedera.node.app.spi.state.WritableStates; -import com.hedera.node.app.state.HederaState; -import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; -import com.swirlds.common.context.PlatformContext; -import com.swirlds.common.crypto.Cryptography; -import com.swirlds.common.crypto.CryptographyHolder; -import com.swirlds.common.crypto.Hash; -import com.swirlds.common.system.InitTrigger; -import com.swirlds.common.system.NodeId; -import com.swirlds.common.system.Platform; -import com.swirlds.common.utility.EmptyIterator; -import com.swirlds.config.api.Configuration; -import com.swirlds.platform.gui.SwirldsGui; -import java.util.Collections; -import java.util.Iterator; -import java.util.Set; -import java.util.function.Predicate; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import 
org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class QueryComponentTest { - - @Mock - private Cryptography cryptography; - - @Mock - private Platform platform; - - private HederaInjectionComponent app; - - @BeforeEach - void setUp() { - final var selfNodeId = new NodeId(666L); - final Configuration configuration = HederaTestConfigBuilder.createConfig(); - final PlatformContext platformContext = mock(PlatformContext.class); - when(platformContext.getConfiguration()).thenReturn(configuration); - when(platform.getContext()).thenReturn(platformContext); - when(platformContext.getCryptography()).thenReturn(cryptography); - app = DaggerHederaInjectionComponent.builder() - .initTrigger(InitTrigger.GENESIS) - .platform(platform) - .crypto(CryptographyHolder.get()) - .consoleCreator(SwirldsGui::createConsole) - .staticAccountMemo("memo") - .bootstrapProps(new BootstrapProperties()) - .selfId(AccountID.newBuilder().accountNum(selfNodeId.id() + 3L).build()) - .initialHash(new Hash()) - .maxSignedTxnSize(1024) - .genesisUsage(false) - .servicesRegistry(Set::of) - .build(); - - // Simulate an empty (but iterable) state for the record cache - final var emptyIterableQueueState = new EmptyIterableQueueState(new EmptyIterableQueueWritableStates()); - app.workingStateAccessor().setHederaState(emptyIterableQueueState); - } - - @Test - void objectGraphRootsAreAvailable() { - given(platform.getSelfId()).willReturn(new NodeId(0L)); - - final QueryInjectionComponent subject = - app.queryComponentFactory().get().create(); - - assertNotNull(subject.queryWorkflow()); - } - - // The following classes only exist to load enough of an empty state, such that the graph roots can be instantiated - // for the test - private record EmptyIterableQueueState(WritableStates writableStates) implements HederaState { - @NotNull - @Override - public ReadableStates createReadableStates(@NotNull String serviceName) { - return writableStates; - } - - @NotNull - @Override - public WritableStates createWritableStates(@NotNull String serviceName) { - return writableStates; - } - } - - private static final class EmptyIterableQueueWritableStates implements WritableStates { - @NotNull - @Override - public WritableKVState get(@NotNull String stateKey) { - return new MapWritableKVState<>(stateKey, Collections.emptyMap()); - } - - @NotNull - @Override - public WritableSingletonState getSingleton(@NotNull String stateKey) { - return new WritableSingletonState() { - @Override - public void put(@Nullable T value) {} - - @Override - public boolean isModified() { - return false; - } - - @NotNull - @Override - public String getStateKey() { - return "BOGUS STATE KEY"; - } - - @Nullable - @Override - public T get() { - return null; - } - - @Override - public boolean isRead() { - return false; - } - }; - } - - @NotNull - @Override - public WritableQueueState getQueue(@NotNull String stateKey) { - return (WritableQueueState) new EmptyWritableQueueState(); - } - - @Override - public boolean contains(@NotNull String stateKey) { - return false; - } - - @NotNull - @Override - public Set stateKeys() { - return Set.of(); - } - } - - private static final class EmptyWritableQueueState implements WritableQueueState { - - @NotNull - @Override - public String getStateKey() { - return "BOGUS STATE KEY"; - } - - @Nullable - @Override - public Object peek() { - return null; - } - - @NotNull - @Override - public Iterator iterator() { - return new EmptyIterator<>(); - } - - @Override - public void 
add(@NotNull Object element) {} - - @Nullable - @Override - public Object removeIf(@NotNull Predicate predicate) { - return null; - } - } -} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryWorkflowImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryWorkflowImplTest.java index ae19a6f7c1e0..4775b9aea569 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryWorkflowImplTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryWorkflowImplTest.java @@ -61,6 +61,7 @@ import com.hedera.node.app.service.mono.pbj.PbjConverter; import com.hedera.node.app.service.mono.stats.HapiOpCounters; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetExecutionTimeHandler; +import com.hedera.node.app.spi.records.RecordCache; import com.hedera.node.app.spi.workflows.InsufficientBalanceException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryHandler; @@ -130,6 +131,9 @@ class QueryWorkflowImplTest extends AppTestBase { @Mock(strictness = LENIENT) private ConfigProvider configProvider; + @Mock(strictness = LENIENT) + private RecordCache recordCache; + private Query query; private Transaction payment; private TransactionBody txBody; @@ -190,7 +194,8 @@ void setup() throws IOException, PreCheckException { dispatcher, feeAccumulator, queryParser, - configProvider); + configProvider, + recordCache); } @SuppressWarnings("ConstantConditions") @@ -205,7 +210,8 @@ void testConstructorWithIllegalParameters() { dispatcher, feeAccumulator, queryParser, - configProvider)) + configProvider, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -216,7 +222,8 @@ void testConstructorWithIllegalParameters() { dispatcher, feeAccumulator, queryParser, - configProvider)) + configProvider, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -227,7 +234,8 @@ void testConstructorWithIllegalParameters() { dispatcher, feeAccumulator, queryParser, - configProvider)) + configProvider, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -238,7 +246,8 @@ void testConstructorWithIllegalParameters() { dispatcher, feeAccumulator, queryParser, - configProvider)) + configProvider, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -249,7 +258,8 @@ void testConstructorWithIllegalParameters() { dispatcher, feeAccumulator, queryParser, - configProvider)) + configProvider, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -260,7 +270,8 @@ void testConstructorWithIllegalParameters() { null, feeAccumulator, queryParser, - configProvider)) + configProvider, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -271,7 +282,8 @@ void testConstructorWithIllegalParameters() { dispatcher, null, queryParser, - configProvider)) + configProvider, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -282,7 +294,20 @@ void testConstructorWithIllegalParameters() { dispatcher, feeAccumulator, null, - configProvider)) + 
configProvider, + recordCache)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new QueryWorkflowImpl( + stateAccessor, + throttleAccumulator, + submissionManager, + queryChecker, + ingestChecker, + dispatcher, + feeAccumulator, + queryParser, + null, + recordCache)) .isInstanceOf(NullPointerException.class); assertThatThrownBy(() -> new QueryWorkflowImpl( stateAccessor, @@ -293,6 +318,7 @@ void testConstructorWithIllegalParameters() { dispatcher, feeAccumulator, queryParser, + configProvider, null)) .isInstanceOf(NullPointerException.class); } @@ -391,7 +417,8 @@ void testMissingHeaderFails(@Mock QueryHandler localHandler, @Mock QueryDispatch localDispatcher, feeAccumulator, queryParser, - configProvider); + configProvider, + recordCache); // then assertThatThrownBy(() -> workflow.handleQuery(requestBuffer, responseBuffer)) diff --git a/hedera-node/hedera-app/src/testFixtures/java/com/hedera/node/app/fixtures/signature/ExpandedSignaturePairFactory.java b/hedera-node/hedera-app/src/testFixtures/java/com/hedera/node/app/fixtures/signature/ExpandedSignaturePairFactory.java new file mode 100644 index 000000000000..c30ecd0f2528 --- /dev/null +++ b/hedera-node/hedera-app/src/testFixtures/java/com/hedera/node/app/fixtures/signature/ExpandedSignaturePairFactory.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.fixtures.signature; + +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.SignaturePair; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.node.app.service.mono.sigs.utils.MiscCryptoUtils; +import com.hedera.node.app.signature.ExpandedSignaturePair; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; + +public class ExpandedSignaturePairFactory { + + private ExpandedSignaturePairFactory() {} + + /** Simple utility to create an ECDSA_SECP256K1 expanded signature */ + public static ExpandedSignaturePair ecdsaPair(final Key key) { + final var compressed = key.ecdsaSecp256k1OrThrow(); + final var array = new byte[(int) compressed.length()]; + compressed.getBytes(0, array); + final var decompressed = MiscCryptoUtils.decompressSecp256k1(array); + final var sigPair = SignaturePair.newBuilder() + .pubKeyPrefix(key.ecdsaSecp256k1OrThrow()) + .ecdsaSecp256k1(key.ecdsaSecp256k1OrThrow()) + .build(); + return new ExpandedSignaturePair(key, Bytes.wrap(decompressed), null, sigPair); + } + + /** Simple utility to create an ED25519 expanded signature */ + public static ExpandedSignaturePair ed25519Pair(final Key key) { + final var sigPair = SignaturePair.newBuilder() + .pubKeyPrefix(key.ed25519OrThrow()) + .ed25519(key.ed25519OrThrow()) + .build(); + return new ExpandedSignaturePair(key, key.ed25519OrThrow(), null, sigPair); + } + + /** Simple utility to create an ECDSA_SECP256K1 hollow account based expanded signature */ + public static ExpandedSignaturePair hollowPair(final Key key, @NonNull final Account hollowAccount) { + final var compressed = key.ecdsaSecp256k1OrThrow(); + final var array = new byte[(int) compressed.length()]; + compressed.getBytes(0, array); + final var decompressed = MiscCryptoUtils.decompressSecp256k1(array); + final var sigPair = SignaturePair.newBuilder() + .pubKeyPrefix(key.ecdsaSecp256k1OrThrow()) + .ecdsaSecp256k1(key.ecdsaSecp256k1OrThrow()) + .build(); + return new ExpandedSignaturePair(key, Bytes.wrap(decompressed), hollowAccount.alias(), sigPair); + } +} diff --git a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/converter/ProfileConverter.java b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/converter/ProfileConverter.java deleted file mode 100644 index 748e19d850b8..000000000000 --- a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/converter/ProfileConverter.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.config.converter; - -import com.hedera.node.app.service.mono.context.properties.Profile; -import com.swirlds.config.api.converter.ConfigConverter; - -/** - * Config api {@link ConfigConverter} implementation for the type {@link Profile}. 
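Aside (not part of the patch): a hypothetical usage sketch for the new ExpandedSignaturePairFactory test fixture; the FAKE_* key-info arrays and ERIN account are assumed to be the existing fixtures referenced elsewhere in this PR:

    // ED25519: the key bytes double as prefix, signature, and expanded key bytes.
    final Key edKey = FAKE_ED25519_KEY_INFOS[0].publicKey();
    final var edPair = ExpandedSignaturePairFactory.ed25519Pair(edKey);

    // ECDSA_SECP256K1: the factory decompresses the 33-byte compressed key before wrapping it.
    final Key ecKey = FAKE_ECDSA_KEY_INFOS[0].publicKey();
    final var ecPair = ExpandedSignaturePairFactory.ecdsaPair(ecKey);

    // Hollow-account variant additionally records the account's EVM alias in the expanded pair.
    final var hollowPair = ExpandedSignaturePairFactory.hollowPair(ecKey, ERIN.account());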
- */ -public class ProfileConverter implements ConfigConverter { - - @Override - public Profile convert(final String value) throws IllegalArgumentException, NullPointerException { - if (value == null) { - throw new NullPointerException("null can not be converted"); - } - - try { - final int i = Integer.parseInt(value); - if (i == 0) { - return Profile.DEV; - } else if (i == 1) { - return Profile.PROD; - } else if (i == 2) { - return Profile.TEST; - } - } catch (final Exception e) { - // ignore - } - - return Profile.valueOf(value.toUpperCase()); - } -} diff --git a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/HederaConfig.java b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/HederaConfig.java index 87f8a22d32cb..02d0931cae99 100644 --- a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/HederaConfig.java +++ b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/HederaConfig.java @@ -47,6 +47,7 @@ public record HederaConfig( @ConfigProperty(value = "prefetch.threadPoolSize", defaultValue = "4") int prefetchThreadPoolSize, @ConfigProperty(value = "prefetch.codeCacheTtlSecs", defaultValue = "600") int prefetchCodeCacheTtlSecs, // @ConfigProperty(value = "profiles.active", defaultValue = "PROD") Profile profilesActive, + @ConfigProperty(value = "profiles.active", defaultValue = "PROD") String activeProfile, @ConfigProperty(value = "recordStream.isEnabled", defaultValue = "true") boolean recordStreamIsEnabled, @ConfigProperty(value = "recordStream.logDir", defaultValue = "/opt/hgcapp/recordStreams") String recordStreamLogDir, @@ -54,6 +55,8 @@ public record HederaConfig( @ConfigProperty(value = "recordStream.logPeriod", defaultValue = "2") long recordStreamLogPeriod, @ConfigProperty(value = "recordStream.queueCapacity", defaultValue = "5000") int recordStreamQueueCapacity, @ConfigProperty(value = "recordStream.logEveryTransaction", defaultValue = "false") - boolean recordStreamLogEveryTransaction - // @ConfigProperty("workflows.enabled", defaultValue = "") Set workflowsEnabled - ) {} + boolean recordStreamLogEveryTransaction, + @ConfigProperty(value = "workflow.verificationTimeoutMS", defaultValue = "20000") + long workflowVerificationTimeoutMS, + // FUTURE: Set. 
+ @ConfigProperty(value = "workflows.enabled", defaultValue = "") String workflowsEnabled) {} diff --git a/hedera-node/hedera-config/src/test/java/com/hedera/node/config/PropertySourceBasedConfigTest.java b/hedera-node/hedera-config/src/test/java/com/hedera/node/config/PropertySourceBasedConfigTest.java index 27fd3b7f700b..d6aaeb0aa38e 100644 --- a/hedera-node/hedera-config/src/test/java/com/hedera/node/config/PropertySourceBasedConfigTest.java +++ b/hedera-node/hedera-config/src/test/java/com/hedera/node/config/PropertySourceBasedConfigTest.java @@ -26,7 +26,6 @@ import com.hedera.node.app.hapi.utils.sysfiles.domain.KnownBlockValues; import com.hedera.node.app.hapi.utils.sysfiles.domain.throttling.ScaleFactor; import com.hedera.node.app.service.mono.context.properties.EntityType; -import com.hedera.node.app.service.mono.context.properties.Profile; import com.hedera.node.app.service.mono.context.properties.PropertySource; import com.hedera.node.app.service.mono.fees.calculation.CongestionMultipliers; import com.hedera.node.app.service.mono.fees.calculation.EntityScaleFactors; @@ -45,7 +44,6 @@ import com.hedera.node.config.converter.LegacyContractIdActivationsConverter; import com.hedera.node.config.converter.MapAccessTypeConverter; import com.hedera.node.config.converter.PermissionedAccountsRangeConverter; -import com.hedera.node.config.converter.ProfileConverter; import com.hedera.node.config.converter.RecomputeTypeConverter; import com.hedera.node.config.converter.ScaleFactorConverter; import com.hedera.node.config.converter.SidecarTypeConverter; @@ -109,7 +107,6 @@ void testConfig() { .withConverter(new ContractIDConverter()) .withConverter(new FileIDConverter()) .withConverter(new HederaFunctionalityConverter()) - .withConverter(new ProfileConverter()) .withConverter(new SidecarTypeConverter()) .withConverter(new BytesConverter()) .withSource(new PropertySourceBasedConfigSource(propertySource)) @@ -133,7 +130,7 @@ void testConfig() { final FileID fileID = configuration.getValue("test.fileID", FileID.class); final HederaFunctionality hederaFunctionality = configuration.getValue("test.hederaFunctionality", HederaFunctionality.class); - final Profile profile = configuration.getValue("test.profile", Profile.class); + final String profile = configuration.getValue("test.profile", String.class); final SidecarType sidecarType = configuration.getValue("test.sidecarType", SidecarType.class); // then diff --git a/hedera-node/hedera-config/src/test/java/com/hedera/node/config/converter/ProfileConverterTest.java b/hedera-node/hedera-config/src/test/java/com/hedera/node/config/converter/ProfileConverterTest.java deleted file mode 100644 index b750573835ff..000000000000 --- a/hedera-node/hedera-config/src/test/java/com/hedera/node/config/converter/ProfileConverterTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.config.converter; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - -import com.hedera.node.app.service.mono.context.properties.Profile; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; - -class ProfileConverterTest { - - @Test - void testNullParam() { - // given - final ProfileConverter converter = new ProfileConverter(); - - // then - assertThatThrownBy(() -> converter.convert(null)).isInstanceOf(NullPointerException.class); - } - - @Test - void testInvalidParam() { - // given - final ProfileConverter converter = new ProfileConverter(); - - // then - assertThatThrownBy(() -> converter.convert("null")).isInstanceOf(IllegalArgumentException.class); - } - - @ParameterizedTest - @CsvSource({ - "DEV, DEV", - "TEST, TEST", - "PROD, PROD", - "dev, DEV", - "test, TEST", - "prod, PROD", - "dEv, DEV", - "tEst, TEST", - "pRod, PROD", - "0,DEV", - "2,TEST", - "1,PROD" - }) - void testValidParam(final String input, final String enumName) { - // given - final ProfileConverter converter = new ProfileConverter(); - final Profile expected = Profile.valueOf(enumName); - - // when - final Profile cryptoTransfer = converter.convert(input); - - // then - assertThat(cryptoTransfer).isEqualTo(expected); - } -} diff --git a/hedera-node/hedera-config/src/testFixtures/java/com/hedera/node/config/testfixtures/HederaTestConfigBuilder.java b/hedera-node/hedera-config/src/testFixtures/java/com/hedera/node/config/testfixtures/HederaTestConfigBuilder.java index ebd31b7c3c24..0f73e7f57cc6 100644 --- a/hedera-node/hedera-config/src/testFixtures/java/com/hedera/node/config/testfixtures/HederaTestConfigBuilder.java +++ b/hedera-node/hedera-config/src/testFixtures/java/com/hedera/node/config/testfixtures/HederaTestConfigBuilder.java @@ -29,7 +29,6 @@ import com.hedera.node.config.converter.LegacyContractIdActivationsConverter; import com.hedera.node.config.converter.MapAccessTypeConverter; import com.hedera.node.config.converter.PermissionedAccountsRangeConverter; -import com.hedera.node.config.converter.ProfileConverter; import com.hedera.node.config.converter.RecomputeTypeConverter; import com.hedera.node.config.converter.ScaleFactorConverter; import com.hedera.node.config.converter.SemanticVersionConverter; @@ -82,7 +81,6 @@ public static TestConfigBuilder create(boolean registerAllTypes) { .withConverter(new LegacyContractIdActivationsConverter()) .withConverter(new MapAccessTypeConverter()) .withConverter(new PermissionedAccountsRangeConverter()) - .withConverter(new ProfileConverter()) .withConverter(new RecomputeTypeConverter()) .withConverter(new ScaleFactorConverter()) .withConverter(new SemanticVersionConverter()) diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/AdapterUtils.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/AdapterUtils.java index ade5b6ceb32b..0877e20110fd 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/AdapterUtils.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/AdapterUtils.java @@ -20,8 +20,8 @@ import static com.hedera.node.app.service.mono.context.BasicTransactionContext.EMPTY_KEY; import static 
com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; import static com.hedera.node.app.service.mono.utils.EntityNum.MISSING_NUM; -import static com.hedera.node.app.service.mono.utils.EntityNum.fromAccountId; import static com.hedera.node.app.service.mono.utils.MiscUtils.asKeyUnchecked; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.COMPLEX_KEY_ACCOUNT; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.COMPLEX_KEY_ACCOUNT_KT; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.CURRENTLY_UNUSED_ALIAS; @@ -72,8 +72,6 @@ import com.hedera.hapi.node.state.token.AccountApprovalForAllAllowance; import com.hedera.hapi.node.state.token.AccountCryptoAllowance; import com.hedera.hapi.node.state.token.AccountFungibleTokenAllowance; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; -import com.hedera.node.app.service.mono.state.virtual.EntityNumVirtualKey; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.node.app.spi.state.ReadableKVState; @@ -119,22 +117,16 @@ public static ReadableStates mockStates(final Map keysT return mockStates; } - private static ReadableKVState wellKnownAccountsState() { + private static ReadableKVState wellKnownAccountsState() { return new MapReadableKVState<>(ACCOUNTS_KEY, wellKnownAccountStoreAt()); } - private static MapReadableKVState wellKnownAliasState() { - final Map wellKnownAliases = Map.ofEntries( - Map.entry(CURRENTLY_UNUSED_ALIAS, new EntityNumValue(MISSING_NUM.longValue())), - Map.entry( - NO_RECEIVER_SIG_ALIAS, - new EntityNumValue(fromAccountId(NO_RECEIVER_SIG).longValue())), - Map.entry( - RECEIVER_SIG_ALIAS, - new EntityNumValue(fromAccountId(RECEIVER_SIG).longValue())), - Map.entry( - FIRST_TOKEN_SENDER_LITERAL_ALIAS.toStringUtf8(), - new EntityNumValue(fromAccountId(FIRST_TOKEN_SENDER).longValue()))); + private static MapReadableKVState wellKnownAliasState() { + final Map wellKnownAliases = Map.ofEntries( + Map.entry(Bytes.wrap(CURRENTLY_UNUSED_ALIAS), asAccount(MISSING_NUM.longValue())), + Map.entry(Bytes.wrap(NO_RECEIVER_SIG_ALIAS), toPbj(NO_RECEIVER_SIG)), + Map.entry(Bytes.wrap(RECEIVER_SIG_ALIAS), toPbj(RECEIVER_SIG)), + Map.entry(Bytes.wrap(FIRST_TOKEN_SENDER_LITERAL_ALIAS.toByteArray()), toPbj(FIRST_TOKEN_SENDER))); return new MapReadableKVState<>(ALIASES_KEY, wellKnownAliases); } @@ -157,54 +149,52 @@ class SigReqAdapterUtils { .spenderNum(DEFAULT_PAYER.getAccountNum()) .build(); - private static ReadableKVState wellKnownAccountsState() { + private static ReadableKVState wellKnownAccountsState() { return new MapReadableKVState<>(ACCOUNTS_KEY, wellKnownAccountStoreAt()); } - public static Map wellKnownAccountStoreAt() { - final var destination = new HashMap(); + public static Map wellKnownAccountStoreAt() { + final var destination = new HashMap(); destination.put( - EntityNumVirtualKey.fromLong(FIRST_TOKEN_SENDER.getAccountNum()), + toPbj(FIRST_TOKEN_SENDER), toPbjAccount(FIRST_TOKEN_SENDER.getAccountNum(), FIRST_TOKEN_SENDER_KT.asPbjKey(), 10_000L)); destination.put( - EntityNumVirtualKey.fromLong(SECOND_TOKEN_SENDER.getAccountNum()), + toPbj(SECOND_TOKEN_SENDER), toPbjAccount(SECOND_TOKEN_SENDER.getAccountNum(), SECOND_TOKEN_SENDER_KT.asPbjKey(), 10_000L)); destination.put( - EntityNumVirtualKey.fromLong(TOKEN_RECEIVER.getAccountNum()), - 
toPbjAccount(TOKEN_RECEIVER.getAccountNum(), TOKEN_WIPE_KT.asPbjKey(), 0L)); + toPbj(TOKEN_RECEIVER), toPbjAccount(TOKEN_RECEIVER.getAccountNum(), TOKEN_WIPE_KT.asPbjKey(), 0L)); destination.put( - EntityNumVirtualKey.fromLong(DEFAULT_NODE.getAccountNum()), - toPbjAccount(DEFAULT_NODE.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), 0L)); + toPbj(DEFAULT_NODE), toPbjAccount(DEFAULT_NODE.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), 0L)); destination.put( - EntityNumVirtualKey.fromLong(DEFAULT_PAYER.getAccountNum()), + toPbj(DEFAULT_PAYER), toPbjAccount(DEFAULT_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(STAKING_FUND.getAccountNum()), + toPbj(STAKING_FUND), toPbjAccount(STAKING_FUND.getAccountNum(), toPbj(asKeyUnchecked(EMPTY_KEY)), 0L)); destination.put( - EntityNumVirtualKey.fromLong(MASTER_PAYER.getAccountNum()), + toPbj(MASTER_PAYER), toPbjAccount(MASTER_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(TREASURY_PAYER.getAccountNum()), + toPbj(TREASURY_PAYER), toPbjAccount(TREASURY_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(NO_RECEIVER_SIG.getAccountNum()), + toPbj(NO_RECEIVER_SIG), toPbjAccount(NO_RECEIVER_SIG.getAccountNum(), NO_RECEIVER_SIG_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(RECEIVER_SIG.getAccountNum()), + toPbj(RECEIVER_SIG), toPbjAccount(RECEIVER_SIG.getAccountNum(), RECEIVER_SIG_KT.asPbjKey(), DEFAULT_BALANCE, true)); destination.put( - EntityNumVirtualKey.fromLong(SYS_ACCOUNT.getAccountNum()), + toPbj(SYS_ACCOUNT), toPbjAccount(SYS_ACCOUNT.getAccountNum(), SYS_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(MISC_ACCOUNT.getAccountNum()), + toPbj(MISC_ACCOUNT), toPbjAccount(MISC_ACCOUNT.getAccountNum(), MISC_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(CUSTOM_PAYER_ACCOUNT.getAccountNum()), + toPbj(CUSTOM_PAYER_ACCOUNT), toPbjAccount( CUSTOM_PAYER_ACCOUNT.getAccountNum(), CUSTOM_PAYER_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(OWNER_ACCOUNT.getAccountNum()), + toPbj(OWNER_ACCOUNT), toPbjAccount( OWNER_ACCOUNT.getAccountNum(), OWNER_ACCOUNT_KT.asPbjKey(), @@ -214,7 +204,7 @@ public static Map wellKnownAccountStoreAt() { List.of(fungibleTokenAllowances), List.of(nftAllowances))); destination.put( - EntityNumVirtualKey.fromLong(DELEGATING_SPENDER.getAccountNum()), + toPbj(DELEGATING_SPENDER), toPbjAccount( DELEGATING_SPENDER.getAccountNum(), DELEGATING_SPENDER_KT.asPbjKey(), @@ -224,20 +214,20 @@ public static Map wellKnownAccountStoreAt() { List.of(fungibleTokenAllowances), List.of(nftAllowances))); destination.put( - EntityNumVirtualKey.fromLong(COMPLEX_KEY_ACCOUNT.getAccountNum()), + toPbj(COMPLEX_KEY_ACCOUNT), toPbjAccount( COMPLEX_KEY_ACCOUNT.getAccountNum(), COMPLEX_KEY_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(TOKEN_TREASURY.getAccountNum()), + toPbj(TOKEN_TREASURY), toPbjAccount(TOKEN_TREASURY.getAccountNum(), TOKEN_TREASURY_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(DILIGENT_SIGNING_PAYER.getAccountNum()), + toPbj(DILIGENT_SIGNING_PAYER), toPbjAccount( DILIGENT_SIGNING_PAYER.getAccountNum(), DILIGENT_SIGNING_PAYER_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - 
EntityNumVirtualKey.fromLong(FROM_OVERLAP_PAYER.getAccountNum()), + toPbj(FROM_OVERLAP_PAYER), toPbjAccount( FROM_OVERLAP_PAYER.getAccountNum(), FROM_OVERLAP_PAYER_KT.asPbjKey(), DEFAULT_BALANCE)); return destination; diff --git a/hedera-node/hedera-evm/src/main/java/com/hedera/node/app/service/evm/utils/ValidationUtils.java b/hedera-node/hedera-evm/src/main/java/com/hedera/node/app/service/evm/utils/ValidationUtils.java index 312449aa49a0..c3a6d3041582 100644 --- a/hedera-node/hedera-evm/src/main/java/com/hedera/node/app/service/evm/utils/ValidationUtils.java +++ b/hedera-node/hedera-evm/src/main/java/com/hedera/node/app/service/evm/utils/ValidationUtils.java @@ -18,6 +18,7 @@ import com.hedera.node.app.service.evm.exceptions.InvalidTransactionException; import com.hederahashgraph.api.proto.java.ResponseCodeEnum; +import java.util.function.Supplier; /** * A minimalist collection of helpers to improve readability of code that throws an {@code @@ -46,6 +47,13 @@ public static void validateTrue(final boolean flag, final ResponseCodeEnum code, } } + public static void validateTrue( + final boolean flag, final ResponseCodeEnum code, final Supplier failureMsg) { + if (!flag) { + throw new InvalidTransactionException(failureMsg.get(), code); + } + } + public static void validateFalse(final boolean flag, final ResponseCodeEnum code) { if (flag) { throw new InvalidTransactionException(code); @@ -58,6 +66,13 @@ public static void validateFalse(final boolean flag, final ResponseCodeEnum code } } + public static void validateFalse( + final boolean flag, final ResponseCodeEnum code, final Supplier failureMsg) { + if (flag) { + throw new InvalidTransactionException(failureMsg.get(), code); + } + } + public static void validateFalseOrRevert(final boolean flag, final ResponseCodeEnum code) { if (flag) { throw new InvalidTransactionException(code, true); diff --git a/hedera-node/hedera-evm/src/test/java/com/hedera/node/app/service/evm/utils/ValidationUtilsTest.java b/hedera-node/hedera-evm/src/test/java/com/hedera/node/app/service/evm/utils/ValidationUtilsTest.java index 0c2d8dd50f09..367adbca1cdc 100644 --- a/hedera-node/hedera-evm/src/test/java/com/hedera/node/app/service/evm/utils/ValidationUtilsTest.java +++ b/hedera-node/hedera-evm/src/test/java/com/hedera/node/app/service/evm/utils/ValidationUtilsTest.java @@ -23,7 +23,9 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ALLOWANCE_OWNER_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_BURN_AMOUNT; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_SYMBOL; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MEMO_TOO_LONG; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MISSING_TOKEN_SYMBOL; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -36,7 +38,7 @@ class ValidationUtilsTest { @Test - void factoriesWorkAsExpected() { + void testValidateTrue() { final var falseExCapturedByCode = assertThrows(InvalidTransactionException.class, () -> validateTrue(false, MEMO_TOO_LONG)); validateTrue(true, MEMO_TOO_LONG); @@ -44,6 +46,20 @@ void factoriesWorkAsExpected() { InvalidTransactionException.class, () -> validateTrue(false, INVALID_TOKEN_BURN_AMOUNT, "Should be true!")); 
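Aside (not part of the patch): the point of the Supplier-based overloads added to ValidationUtils further below is that the failure message is built only when validation actually fails; a hedged sketch, where amount is an assumed example variable:

    final long amount = 0L;
    // The String.format call runs only on the failure path, keeping the happy path allocation-free.
    validateTrue(
            amount > 0,
            INVALID_TOKEN_BURN_AMOUNT,
            () -> String.format("Burn amount %d must be positive", amount));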
validateTrue(true, INVALID_TOKEN_BURN_AMOUNT, "Should be true!"); + final var falseExCapturedbyCodeAndLambda = assertThrows( + InvalidTransactionException.class, + () -> validateTrue(false, INVALID_TOKEN_SYMBOL, () -> String.format("Should %s be true!", "also"))); + validateTrue(true, INVALID_TOKEN_SYMBOL, () -> String.format("Should %s be true!", "also")); + + assertEquals(MEMO_TOO_LONG, falseExCapturedByCode.getResponseCode()); + assertEquals(INVALID_TOKEN_BURN_AMOUNT, falseExCapturedByCodeAndMsg.getResponseCode()); + assertEquals("Should be true!", falseExCapturedByCodeAndMsg.getMessage()); + assertEquals(INVALID_TOKEN_SYMBOL, falseExCapturedbyCodeAndLambda.getResponseCode()); + assertEquals("Should also be true!", falseExCapturedbyCodeAndLambda.getMessage()); + } + + @Test + void testValidateFalse() { final var trueExCapturedByCode = assertThrows( InvalidTransactionException.class, () -> validateFalse(true, CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT)); validateFalse(false, CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT); @@ -51,13 +67,16 @@ void factoriesWorkAsExpected() { InvalidTransactionException.class, () -> validateFalse(true, TOKEN_HAS_NO_SUPPLY_KEY, "Should be false!")); validateFalse(false, TOKEN_HAS_NO_SUPPLY_KEY, "Should be false!"); + final var trueExCapturedbyCodeAndLambda = assertThrows( + InvalidTransactionException.class, + () -> validateFalse(true, MISSING_TOKEN_SYMBOL, () -> String.format("Should %s be false!", "also"))); + validateFalse(false, MISSING_TOKEN_SYMBOL, () -> String.format("Should %s be false!", "also")); - assertEquals(MEMO_TOO_LONG, falseExCapturedByCode.getResponseCode()); - assertEquals(INVALID_TOKEN_BURN_AMOUNT, falseExCapturedByCodeAndMsg.getResponseCode()); - assertEquals("Should be true!", falseExCapturedByCodeAndMsg.getMessage()); assertEquals(CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT, trueExCapturedByCode.getResponseCode()); assertEquals(TOKEN_HAS_NO_SUPPLY_KEY, trueExCapturedByCodeAndMsg.getResponseCode()); assertEquals("Should be false!", trueExCapturedByCodeAndMsg.getMessage()); + assertEquals(MISSING_TOKEN_SYMBOL, trueExCapturedbyCodeAndLambda.getResponseCode()); + assertEquals("Should also be false!", trueExCapturedbyCodeAndLambda.getMessage()); } @Test diff --git a/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileGetInfoHandler.java b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileGetInfoHandler.java index 37677c43753e..d9014f0fd677 100644 --- a/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileGetInfoHandler.java +++ b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileGetInfoHandler.java @@ -29,7 +29,7 @@ import com.hedera.hapi.node.file.FileInfo; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; -import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; +import com.hedera.node.app.service.file.ReadableFileStore; import com.hedera.node.app.service.file.impl.base.FileQueryBase; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryContext; @@ -75,7 +75,7 @@ public void validate(@NonNull final QueryContext context) throws PreCheckExcepti public @NonNull Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { requireNonNull(header); final var query = context.query(); - final var fileStore = 
context.createStore(ReadableFileStoreImpl.class); + final var fileStore = context.createStore(ReadableFileStore.class); final var ledgerConfig = context.configuration().getConfigData(LedgerConfig.class); final var op = query.fileGetInfoOrThrow(); final var responseBuilder = FileGetInfoResponse.newBuilder(); @@ -100,7 +100,7 @@ public void validate(@NonNull final QueryContext context) throws PreCheckExcepti */ private @Nullable Optional infoForFile( @NonNull final FileID fileID, - @NonNull final ReadableFileStoreImpl fileStore, + @NonNull final ReadableFileStore fileStore, @NonNull final LedgerConfig ledgerConfig) { final var meta = fileStore.getFileMetadata(fileID); if (meta == null) { diff --git a/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileUpdateHandler.java b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileUpdateHandler.java index 47b60760d9c4..f1cf72731d25 100644 --- a/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileUpdateHandler.java +++ b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/handlers/FileUpdateHandler.java @@ -29,7 +29,7 @@ import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.file.FileUpdateTransactionBody; import com.hedera.hapi.node.state.file.File; -import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; +import com.hedera.node.app.service.file.ReadableFileStore; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; import com.hedera.node.app.spi.validation.AttributeValidator; import com.hedera.node.app.spi.workflows.HandleContext; @@ -65,7 +65,7 @@ public FileUpdateHandler() { public void preHandle(@NonNull final PreHandleContext context) throws PreCheckException { requireNonNull(context); final var transactionBody = context.body().fileUpdateOrThrow(); - final var fileStore = context.createStore(ReadableFileStoreImpl.class); + final var fileStore = context.createStore(ReadableFileStore.class); preValidate(transactionBody.fileID(), fileStore, context, false); validateAndAddRequiredKeys(transactionBody.keys(), context, true); diff --git a/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/schemas/GenesisSchema.java b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/schemas/GenesisSchema.java index ee7b7314e132..4d00b701980a 100644 --- a/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/schemas/GenesisSchema.java +++ b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/schemas/GenesisSchema.java @@ -18,6 +18,7 @@ import static com.hedera.hapi.node.base.HederaFunctionality.fromString; import static com.hedera.node.app.service.file.impl.FileServiceImpl.BLOBS_KEY; +import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Objects.requireNonNull; import com.fasterxml.jackson.databind.JsonNode; @@ -30,6 +31,8 @@ import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.KeyList; import com.hedera.hapi.node.base.SemanticVersion; +import com.hedera.hapi.node.base.ServicesConfigurationList; +import com.hedera.hapi.node.base.Setting; import com.hedera.hapi.node.base.SubType; import com.hedera.hapi.node.base.TimestampSeconds; import com.hedera.hapi.node.base.TransactionFeeSchedule; @@ -45,7 +48,12 @@ import 
com.hedera.node.config.data.HederaConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; +import java.io.StringReader; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; +import java.util.Comparator; +import java.util.Properties; import java.util.Set; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -280,7 +288,65 @@ private void createGenesisHapiPermissions( @NonNull final FilesConfig filesConfig, @NonNull final WritableKVState files) { logger.debug("Creating genesis HAPI permissions file"); - // TBD Implement this method + + // Get the path to the HAPI permissions file + final var pathToApiPermissions = Path.of(bootstrapConfig.feeSchedulesJsonResource()); + + // If the file exists, load from there + String apiPermissionsContent = null; + if (Files.exists(pathToApiPermissions)) { + try { + apiPermissionsContent = Files.readString(pathToApiPermissions); + logger.info("API Permissions loaded from {}", pathToApiPermissions); + } catch (IOException e) { + logger.error( + "API Permissions could not be loaded from {}, looking for fallback on classpath", + pathToApiPermissions); + } + } + // Otherwise, load from the classpath. If that cannot be done, we have a totally broken build. + if (apiPermissionsContent == null) { + final var resourceName = "api-permission.properties"; + try (final var in = Thread.currentThread().getContextClassLoader().getResourceAsStream(resourceName)) { + apiPermissionsContent = new String(requireNonNull(in).readAllBytes(), UTF_8); + logger.info("API Permissions loaded from classpath resource {}", resourceName); + } catch (IOException | NullPointerException e) { + logger.fatal("API Permissions could not be loaded from classpath"); + throw new IllegalArgumentException("API Permissions could not be loaded from classpath", e); + } + } + + // Parse the HAPI permissions file into a ServicesConfigurationList protobuf object + final var settings = new ArrayList(); + try (final var in = new StringReader(apiPermissionsContent)) { + final var props = new Properties(); + props.load(in); + props.entrySet().stream() + .sorted(Comparator.comparing(entry -> String.valueOf(entry.getKey()))) + .forEach(entry -> settings.add(Setting.newBuilder() + .name(String.valueOf(entry.getKey())) + .value(String.valueOf(entry.getValue())) + .build())); + } catch (final IOException e) { + logger.fatal("API Permissions could not be parsed"); + throw new IllegalArgumentException("API Permissions could not be parsed", e); + } + + // Store the configuration in state + final var fileNum = filesConfig.hapiPermissions(); + final var fileId = FileID.newBuilder().fileNum(fileNum).build(); // default to shard=0, realm=0 + final var masterKey = + Key.newBuilder().ed25519(bootstrapConfig.genesisPublicKey()).build(); + files.put( + fileId, + File.newBuilder() + .contents(ServicesConfigurationList.PROTOBUF.toBytes(ServicesConfigurationList.newBuilder() + .nameValue(settings) + .build())) + .fileId(fileId) + .keys(KeyList.newBuilder().keys(masterKey)) + .expirationTime(bootstrapConfig.systemEntityExpiry()) + .build()); } // ================================================================================================================ diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoTest.java index 
b79f4d02d374..f9c810849e73 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoTest.java @@ -158,7 +158,7 @@ void getsResponseIfFailedResponse() { final var query = createGetFileInfoQuery(fileId.fileNum()); when(context.query()).thenReturn(query); - when(context.createStore(ReadableFileStoreImpl.class)).thenReturn(readableStore); + when(context.createStore(ReadableFileStore.class)).thenReturn(readableStore); final var response = subject.findResponse(context, responseHeader); final var op = response.fileGetInfoOrThrow(); @@ -176,7 +176,7 @@ void getsResponseIfOkResponse() { final var query = createGetFileInfoQuery(fileId.fileNum()); when(context.query()).thenReturn(query); - when(context.createStore(ReadableFileStoreImpl.class)).thenReturn(readableStore); + when(context.createStore(ReadableFileStore.class)).thenReturn(readableStore); final var response = subject.findResponse(context, responseHeader); final var fileInfoResponse = response.fileGetInfoOrThrow(); diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java index 8326b7e59595..6d649b326381 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java @@ -27,6 +27,7 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ContractID; import com.hedera.hapi.node.base.FileID; +import com.hedera.hapi.node.base.Fraction; import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.QueryHeader; @@ -39,7 +40,10 @@ import com.hedera.hapi.node.base.Transaction; import com.hedera.hapi.node.network.NetworkGetExecutionTimeQuery; import com.hedera.hapi.node.transaction.CustomFee; +import com.hedera.hapi.node.transaction.FixedFee; +import com.hedera.hapi.node.transaction.FractionalFee; import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.RoyaltyFee; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; import com.hedera.node.app.service.mono.state.submerkle.FcCustomFee; @@ -49,7 +53,6 @@ import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hedera.pbj.runtime.io.stream.ReadableStreamingData; import com.hedera.pbj.runtime.io.stream.WritableStreamingData; -import com.hederahashgraph.api.proto.java.AccountID.AccountCase; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.io.ByteArrayInputStream; @@ -65,7 +68,7 @@ public final class PbjConverter { requireNonNull(accountID); final var builder = AccountID.newBuilder().shardNum(accountID.getShardNum()).realmNum(accountID.getRealmNum()); - if (accountID.getAccountCase() == AccountCase.ALIAS) { + if (accountID.getAccountCase() == com.hederahashgraph.api.proto.java.AccountID.AccountCase.ALIAS) { builder.alias(Bytes.wrap(accountID.getAlias().toByteArray())); } else { builder.accountNum(accountID.getAccountNum()); @@ -1351,7 +1354,7 @@ public static Key asPbjKey(@NonNull final JKey jKey) { .build(); } - public static 
com.hederahashgraph.api.proto.java.FileID fromPbj(FileID someFileId) { + public static com.hederahashgraph.api.proto.java.FileID fromPbj(final FileID someFileId) { return com.hederahashgraph.api.proto.java.FileID.newBuilder() .setRealmNum(someFileId.realmNum()) .setShardNum(someFileId.shardNum()) @@ -1359,6 +1362,59 @@ public static com.hederahashgraph.api.proto.java.FileID fromPbj(FileID someFileI .build(); } + @NonNull + public static com.hederahashgraph.api.proto.java.CustomFee fromPbj(@NonNull final CustomFee customFee) { + var builder = com.hederahashgraph.api.proto.java.CustomFee.newBuilder(); + if (customFee.hasFixedFee()) { + builder.setFixedFee(fromPbj(customFee.fixedFee())); + } else if (customFee.hasFractionalFee()) { + builder.setFractionalFee(fromPbj(customFee.fractionalFee())); + } else if (customFee.hasRoyaltyFee()) { + builder.setRoyaltyFee(fromPbj(customFee.royaltyFee())); + } + + builder.setFeeCollectorAccountId(fromPbj(customFee.feeCollectorAccountId())); + builder.setAllCollectorsAreExempt(customFee.allCollectorsAreExempt()); + + return builder.build(); + } + + @NonNull + public static com.hederahashgraph.api.proto.java.RoyaltyFee fromPbj(@NonNull final RoyaltyFee royaltyFee) { + var builder = com.hederahashgraph.api.proto.java.RoyaltyFee.newBuilder(); + builder.setExchangeValueFraction(fromPbj(royaltyFee.exchangeValueFraction())); + if (royaltyFee.hasFallbackFee()) builder.setFallbackFee(fromPbj(royaltyFee.fallbackFee())); + return builder.build(); + } + + @NonNull + public static com.hederahashgraph.api.proto.java.Fraction fromPbj(@NonNull final Fraction fraction) { + var builder = com.hederahashgraph.api.proto.java.Fraction.newBuilder(); + builder.setNumerator(fraction.numerator()); + builder.setDenominator(fraction.denominator()); + return builder.build(); + } + + @NonNull + public static com.hederahashgraph.api.proto.java.FractionalFee fromPbj(@NonNull final FractionalFee fractionalFee) { + var builder = com.hederahashgraph.api.proto.java.FractionalFee.newBuilder(); + builder.setFractionalAmount(fromPbj(fractionalFee.fractionalAmount())); + builder.setMinimumAmount(fractionalFee.minimumAmount()); + builder.setMaximumAmount(fractionalFee.maximumAmount()); + builder.setNetOfTransfers(fractionalFee.netOfTransfers()); + return builder.build(); + } + + @NonNull + public static com.hederahashgraph.api.proto.java.FixedFee fromPbj(@Nullable FixedFee fixedFee) { + var builder = com.hederahashgraph.api.proto.java.FixedFee.newBuilder(); + if (fixedFee != null) { + builder.setAmount(fixedFee.amount()); + builder.setDenominatingTokenId(fromPbj(fixedFee.denominatingTokenId())); + } + return builder.build(); + } + public static TopicID toPbj(com.hederahashgraph.api.proto.java.TopicID topicId) { return protoToPbj(topicId, TopicID.class); } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/adapters/VirtualMapLike.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/adapters/VirtualMapLike.java index 7ab5be01216c..595ba7f84bc4 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/adapters/VirtualMapLike.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/adapters/VirtualMapLike.java @@ -56,6 +56,9 @@ public interface VirtualMapLike { void extractVirtualMapData(ThreadManager threadManager, InterruptableConsumer> handler, int threadCount) throws InterruptedException; + void extractVirtualMapDataC(ThreadManager 
threadManager, InterruptableConsumer<Pair<K, V>> handler, int threadCount) + throws InterruptedException; + static VirtualMapLike from(final VirtualMap real) { return new VirtualMapLike<>() { @Override @@ -72,6 +75,15 @@ public void extractVirtualMapData( VirtualMapMigration.extractVirtualMapData(threadManager, real, handler, threadCount); } + @Override + public void extractVirtualMapDataC( + final ThreadManager threadManager, + final InterruptableConsumer<Pair<K, V>> handler, + final int threadCount) + throws InterruptedException { + VirtualMapMigration.extractVirtualMapDataC(threadManager, real, handler, threadCount); + } + @Override public void registerMetrics(final Metrics metrics) { real.registerMetrics(metrics); diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/exports/SignedStateBalancesExporter.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/exports/SignedStateBalancesExporter.java index 714897b81455..2ec69ea444f0 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/exports/SignedStateBalancesExporter.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/exports/SignedStateBalancesExporter.java @@ -33,7 +33,6 @@ import com.hedera.node.app.service.mono.state.migration.TokenRelStorageAdapter; import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.mono.utils.MiscUtils; -import com.hedera.node.app.service.mono.utils.NonAtomicReference; import com.hedera.node.app.service.mono.utils.SystemExits; import com.hedera.services.stream.proto.AllAccountBalances; import com.hedera.services.stream.proto.SingleAccountBalances; @@ -56,9 +55,11 @@ import java.time.Duration; import java.time.Instant; import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.zip.GZIPOutputStream; import javax.inject.Singleton; @@ -226,21 +227,21 @@ private boolean exportBalancesProtoFile( BalancesSummary summarized(final ServicesState signedState) { final long nodeBalanceWarnThreshold = dynamicProperties.nodeBalanceWarningThreshold(); - final var totalFloat = new NonAtomicReference<>(BigInteger.valueOf(0L)); - final List<SingleAccountBalances> accountBalances = new ArrayList<>(); + final var totalFloat = new AtomicReference<>(BigInteger.ZERO); + final List<SingleAccountBalances> accountBalances = Collections.synchronizedList(new ArrayList<>()); + final var nodeIds = MiscUtils.getNodeAccounts(signedState.addressBook()); final var tokens = signedState.tokens(); final var accounts = signedState.accounts(); final var tokenAssociations = signedState.tokenAssociations(); - accounts.forEach((id, account) -> { + accounts.forEachParallel((id, account) -> { if (!account.isDeleted()) { final var accountId = id.toGrpcAccountId(); final var balance = account.getBalance(); if (nodeIds.contains(accountId) && balance < nodeBalanceWarnThreshold) { log.warn(LOW_NODE_BALANCE_WARN_MSG_TPL, readableId(accountId), balance); } - totalFloat.set(totalFloat.get().add(BigInteger.valueOf(account.getBalance()))); + totalFloat.accumulateAndGet(BigInteger.valueOf(account.getBalance()), BigInteger::add); final SingleAccountBalances.Builder sabBuilder = SingleAccountBalances.newBuilder(); sabBuilder.setHbarBalance(balance).setAccountID(accountId); if (dynamicProperties.shouldExportTokenBalances()) { diff --git
a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/merkle/MerkleToken.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/merkle/MerkleToken.java index 81e6f86b409a..1c1982f03719 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/merkle/MerkleToken.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/merkle/MerkleToken.java @@ -176,8 +176,14 @@ public boolean equals(final Object o) { && Objects.equals(this.symbol, that.symbol) && Objects.equals(this.name, that.name) && Objects.equals(this.memo, that.memo) - && Objects.equals(this.treasury, that.treasury) - && Objects.equals(this.autoRenewAccount, that.autoRenewAccount) + && ((this.treasury == null && that.treasury == null) + || (this.treasury != null + && that.treasury != null + && (this.treasury.num() == that.treasury.num()))) + && ((this.autoRenewAccount == null && that.autoRenewAccount == null) + || (this.autoRenewAccount != null + && that.autoRenewAccount != null + && (this.autoRenewAccount.num() == that.autoRenewAccount.num()))) && equalUpToDecodability(this.wipeKey, that.wipeKey) && equalUpToDecodability(this.supplyKey, that.supplyKey) && equalUpToDecodability(this.adminKey, that.adminKey) diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapter.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapter.java index 87c7f306039c..b4a54a14548a 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapter.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapter.java @@ -133,6 +133,23 @@ private void forEachOnDisk(final BiConsumer visitor) { } } + public void forEachParallel(final BiConsumer visitor) { + if (accountsOnDisk) { + try { + onDiskAccounts.extractVirtualMapDataC( + getStaticThreadManager(), + entry -> visitor.accept(entry.getKey().asEntityNum(), entry.getValue()), + THREAD_COUNT); + } catch (final InterruptedException e) { + log.error("Interrupted while extracting VM data", e); + Thread.currentThread().interrupt(); + throw new IllegalStateException(e); + } + } else { + inMemoryAccounts.forEach(visitor); + } + } + public boolean areOnDisk() { return accountsOnDisk; } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/TokenStateTranslator.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/TokenStateTranslator.java new file mode 100644 index 000000000000..2f88cf7512d0 --- /dev/null +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/TokenStateTranslator.java @@ -0,0 +1,231 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.mono.state.migration; + +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenSupplyType; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.transaction.CustomFee; +import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; +import com.hedera.node.app.service.mono.pbj.PbjConverter; +import com.hedera.node.app.service.mono.state.submerkle.EntityId; +import com.hedera.node.app.service.mono.state.submerkle.FcCustomFee; +import com.hedera.node.app.service.mono.utils.EntityNum; +import com.hedera.node.app.service.token.ReadableTokenStore; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.List; + +/** + * Translates between the legacy {@link com.hedera.node.app.service.mono.state.merkle.MerkleToken} and the {@link Token} and vice versa. + */ +public final class TokenStateTranslator { + private TokenStateTranslator() { + throw new UnsupportedOperationException("Utility Class"); + } + + /** + * Translates the {@link com.hedera.node.app.service.mono.state.merkle.MerkleToken} to the {@link Token}. + * @param token {@link com.hedera.node.app.service.mono.state.merkle.MerkleToken} + * @return {@link Token} + */ + public static Token tokenFromMerkle( + @NonNull final com.hedera.node.app.service.mono.state.merkle.MerkleToken token) { + final var builder = Token.newBuilder() + .tokenId(TokenID.newBuilder().tokenNum(token.getKey().longValue())) + .name(token.name()) + .symbol(token.symbol()) + .decimals(token.decimals()) + .totalSupply(token.totalSupply()) + .treasuryAccountId(asAccount(token.treasury().num())) + .lastUsedSerialNumber(token.getLastUsedSerialNumber()) + .deleted(token.isDeleted()) + .tokenType(fromMerkleType(token.tokenType())) + .supplyType(fromMerkleSupplyType(token.supplyType())) + .autoRenewAccountId( + token.autoRenewAccount() != null + ? 
asAccount(token.autoRenewAccount().num()) + : null) + .autoRenewSecs(token.autoRenewPeriod()) + .expiry(token.expiry()) + .memo(token.memo()) + .maxSupply(token.maxSupply()) + .paused(token.isPaused()) + .accountsFrozenByDefault(token.accountsAreFrozenByDefault()) + .accountsKycGrantedByDefault(token.accountsKycGrantedByDefault()) + .customFees(convertMonoCustomFees(token.customFeeSchedule())); + if (token.hasAdminKey()) { + builder.adminKey(PbjConverter.asPbjKey(token.getAdminKey())); + } + if (token.hasKycKey()) { + builder.kycKey(PbjConverter.asPbjKey(token.getKycKey())); + } + if (token.hasFreezeKey()) { + builder.freezeKey(PbjConverter.asPbjKey(token.getFreezeKey())); + } + if (token.hasWipeKey()) { + builder.wipeKey(PbjConverter.asPbjKey(token.getWipeKey())); + } + if (token.hasSupplyKey()) { + builder.supplyKey(PbjConverter.asPbjKey(token.getSupplyKey())); + } + if (token.hasFeeScheduleKey()) { + builder.feeScheduleKey(PbjConverter.asPbjKey(token.getFeeScheduleKey())); + } + if (token.hasPauseKey()) { + builder.pauseKey(PbjConverter.asPbjKey(token.getPauseKey())); + } + return builder.build(); + } + + @NonNull + static List convertMonoCustomFees( + @Nullable final List monoCustomFees) { + final List customFees = new ArrayList<>(); + if (monoCustomFees != null) { + for (var customFee : monoCustomFees) { + customFees.add(PbjConverter.fromFcCustomFee(customFee)); + } + } + + return customFees; + } + + @NonNull + static TokenType fromMerkleType(@NonNull com.hedera.node.app.service.evm.store.tokens.TokenType tokenType) { + return (tokenType.equals(com.hedera.node.app.service.evm.store.tokens.TokenType.NON_FUNGIBLE_UNIQUE)) + ? TokenType.NON_FUNGIBLE_UNIQUE + : TokenType.FUNGIBLE_COMMON; + } + + @NonNull + static TokenSupplyType fromMerkleSupplyType( + @NonNull com.hedera.node.app.service.mono.state.enums.TokenSupplyType tokenSupplyType) { + return (tokenSupplyType.equals(com.hedera.node.app.service.mono.state.enums.TokenSupplyType.INFINITE)) + ? 
TokenSupplyType.INFINITE + : TokenSupplyType.FINITE; + } + + @NonNull + /*** + * Converts a {@link com.hedera.hapi.node.state.token.Token} to a {@link com.hedera.node.app.service.mono.state.merkle.MerkleAccount} + * @param tokenId the {@link TokenID} of the token to convert + * @param readableTokenStore the {@link com.hedera.node.app.service.token.ReadableTokenStore} to use to retrieve the token + * @return the {@link com.hedera.node.app.service.mono.state.merkle.MerkleToken} corresponding to the tokenId + */ + public static com.hedera.node.app.service.mono.state.merkle.MerkleToken merkleTokenFromToken( + @NonNull TokenID tokenId, @NonNull ReadableTokenStore readableTokenStore) { + requireNonNull(tokenId); + requireNonNull(readableTokenStore); + final var optionalToken = readableTokenStore.get(tokenId); + if (optionalToken == null) { + throw new IllegalArgumentException("Token not found"); + } + return merkleTokenFromToken(optionalToken); + } + + @NonNull + public static com.hedera.node.app.service.mono.state.merkle.MerkleToken merkleTokenFromToken(@NonNull Token token) { + requireNonNull(token); + com.hedera.node.app.service.mono.state.merkle.MerkleToken merkleToken = + new com.hedera.node.app.service.mono.state.merkle.MerkleToken(); + merkleToken.setKey(EntityNum.fromLong(token.tokenId().tokenNum())); + merkleToken.setName(token.name()); + merkleToken.setSymbol(token.symbol()); + merkleToken.setDecimals(token.decimals()); + merkleToken.setTotalSupply(token.totalSupply()); + merkleToken.setTreasury(EntityId.fromNum(token.treasuryAccountId().accountNum())); + merkleToken.setLastUsedSerialNumber(token.lastUsedSerialNumber()); + merkleToken.setDeleted(token.deleted()); + merkleToken.setTokenType(toMerkleType(token.tokenType())); + merkleToken.setSupplyType(toMerkleSupplyType(token.supplyType())); + merkleToken.setAutoRenewAccount( + (token.autoRenewAccountId() != null) + ? 
new EntityId( + token.autoRenewAccountId().shardNum(), + token.autoRenewAccountId().realmNum(), + token.autoRenewAccountId().accountNum()) + : null); + merkleToken.setAutoRenewPeriod(token.autoRenewSecs()); + merkleToken.setExpiry(token.expiry()); + merkleToken.setMemo(token.memo()); + merkleToken.setMaxSupply(token.maxSupply()); + merkleToken.setPaused(token.paused()); + merkleToken.setAccountsFrozenByDefault(token.accountsFrozenByDefault()); + merkleToken.setAccountsKycGrantedByDefault(token.accountsKycGrantedByDefault()); + merkleToken.setAdminKey((JKey) PbjConverter.fromPbjKeyUnchecked(token.adminKeyOrElse((Key.DEFAULT))) + .orElse(null)); + merkleToken.setKycKey((JKey) PbjConverter.fromPbjKeyUnchecked(token.kycKeyOrElse((Key.DEFAULT))) + .orElse(null)); + merkleToken.setFreezeKey((JKey) PbjConverter.fromPbjKeyUnchecked(token.freezeKeyOrElse((Key.DEFAULT))) + .orElse(null)); + merkleToken.setWipeKey((JKey) PbjConverter.fromPbjKeyUnchecked(token.wipeKeyOrElse((Key.DEFAULT))) + .orElse(null)); + merkleToken.setSupplyKey((JKey) PbjConverter.fromPbjKeyUnchecked(token.supplyKeyOrElse((Key.DEFAULT))) + .orElse(null)); + merkleToken.setFeeScheduleKey((JKey) PbjConverter.fromPbjKeyUnchecked(token.feeScheduleKeyOrElse((Key.DEFAULT))) + .orElse(null)); + merkleToken.setPauseKey((JKey) PbjConverter.fromPbjKeyUnchecked(token.pauseKeyOrElse((Key.DEFAULT))) + .orElse(null)); + merkleToken.setFeeSchedule(convertCustomFees(token.customFees())); + return merkleToken; + } + + @NonNull + static com.hedera.node.app.service.evm.store.tokens.TokenType toMerkleType(@NonNull TokenType tokenType) { + return (tokenType.equals(TokenType.NON_FUNGIBLE_UNIQUE)) + ? com.hedera.node.app.service.evm.store.tokens.TokenType.NON_FUNGIBLE_UNIQUE + : com.hedera.node.app.service.evm.store.tokens.TokenType.FUNGIBLE_COMMON; + } + + @NonNull + static com.hedera.node.app.service.mono.state.enums.TokenSupplyType toMerkleSupplyType( + @NonNull TokenSupplyType tokenSupplyType) { + return (tokenSupplyType.equals(TokenSupplyType.INFINITE)) + ? 
com.hedera.node.app.service.mono.state.enums.TokenSupplyType.INFINITE + : com.hedera.node.app.service.mono.state.enums.TokenSupplyType.FINITE; + } + + @NonNull + static List convertCustomFees( + @Nullable final List customFees) { + final List monoCustomFees = new ArrayList<>(); + if (customFees != null) { + for (var customFee : customFees) { + if (customFee != null) { + monoCustomFees.add(FcCustomFee.fromGrpc(PbjConverter.fromPbj(customFee))); + } + } + } + + return monoCustomFees; + } + + private static AccountID asAccount(final long num) { + return AccountID.newBuilder().accountNum(num).build(); + } + + private static TokenID asToken(final long num) { + return TokenID.newBuilder().tokenNum(num).build(); + } +} diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/models/Token.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/models/Token.java index 5eced31982b5..e0bc1f6246a8 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/models/Token.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/models/Token.java @@ -54,6 +54,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Supplier; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; @@ -364,7 +365,7 @@ private void changeSupply( validateTrue( treasuryRel.hasInvolvedIds(id, treasury.getId()), FAIL_INVALID, - "Cannot change " + this + " supply (" + amount + ") with non-treasury rel " + treasuryRel); + () -> "Cannot change " + this + " supply (" + amount + ") with non-treasury rel " + treasuryRel); if (!ignoreSupplyKey) { validateTrue(supplyKey != null, TOKEN_HAS_NO_SUPPLY_KEY); } @@ -374,7 +375,7 @@ private void changeSupply( validateTrue( maxSupply >= newTotalSupply, TOKEN_MAX_SUPPLY_REACHED, - "Cannot mint new supply (" + amount + "). Max supply (" + maxSupply + ") reached"); + () -> "Cannot mint new supply (" + amount + "). 
Max supply (" + maxSupply + ") reached"); } final var treasuryAccount = treasuryRel.getAccount(); final long newTreasuryBalance = treasuryRel.getBalance() + amount; @@ -410,8 +411,8 @@ private void amountWipeValidations(final TokenRelationship accountRel, final lon validateTrue(newAccountBalance >= 0, INVALID_WIPING_AMOUNT, "Wiping would negate account balance"); } - private String errorMessage(final String op, final long amount, final TokenRelationship rel) { - return "Cannot " + op + " " + amount + " units of " + this + " from " + rel; + private Supplier errorMessage(final String op, final long amount, final TokenRelationship rel) { + return () -> "Cannot " + op + " " + amount + " units of " + this + " from " + rel; } public Account getTreasury() { diff --git a/hedera-node/hedera-mono-service/src/main/java/module-info.java b/hedera-node/hedera-mono-service/src/main/java/module-info.java index d0cb578fb284..1fbbf340c3a8 100644 --- a/hedera-node/hedera-mono-service/src/main/java/module-info.java +++ b/hedera-node/hedera-mono-service/src/main/java/module-info.java @@ -57,21 +57,6 @@ exports com.hedera.node.app.service.mono.store.models to com.hedera.node.app.service.mono.test.fixtures, com.hedera.node.app; - exports com.hedera.node.app.service.mono.state.merkle to - com.hedera.node.app.service.mono.test.fixtures, - com.hedera.node.app.service.token.impl, - com.hedera.node.app.service.token.impl.test, - com.hedera.node.app.service.contract.impl.test, - com.hedera.node.app.service.contract.impl, - com.hedera.node.app.service.networkadmin.impl, - com.hedera.node.app.service.consensus.impl, - com.hedera.node.app.service.file.impl, - com.hedera.node.app, - com.hedera.node.app.service.schedule.impl, - com.hedera.node.app.service.networkadmin.impl.test, - com.hedera.node.app.service.schedule.impl.test, - com.hedera.node.app.service.consensus.impl.test, - com.hedera.node.app.service.network.admin.impl; exports com.hedera.node.app.service.mono.state.validation to com.hedera.node.app, com.hedera.node.app.service.consensus.impl, @@ -81,7 +66,8 @@ com.hedera.node.app; exports com.hedera.node.app.service.mono.sigs.utils to com.hedera.node.app.service.mono.test.fixtures, - com.hedera.node.app; + com.hedera.node.app, + com.hedera.node.app.test.fixtures; exports com.hedera.node.app.service.mono.sigs.verification to com.hedera.node.app.service.mono.test.fixtures, com.hedera.node.app; @@ -260,6 +246,11 @@ opens com.hedera.node.app.service.mono.cache to com.swirlds.common; + exports com.hedera.node.app.service.mono.state.merkle; + + opens com.hedera.node.app.service.mono.state.migration to + com.swirlds.common; + requires transitive com.hedera.node.app.hapi.fees; requires transitive com.hedera.node.app.hapi.utils; requires transitive com.hedera.node.app.service.evm; diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/sigs/order/SigRequirementsTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/sigs/order/SigRequirementsTest.java index 67e09bea09ec..75c0b1f2c75f 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/sigs/order/SigRequirementsTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/sigs/order/SigRequirementsTest.java @@ -308,6 +308,7 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SCHEDULED_TRANSACTION_NOT_IN_WHITELIST; import static java.util.stream.Collectors.toList; import static org.hamcrest.MatcherAssert.assertThat; 
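Editorial aside on the `Token.changeSupply`/`errorMessage` change above: having the helper return a `Supplier<String>` instead of a `String` defers the expensive `toString()` concatenation of the token and relationship until a validation actually fails. The sketch below illustrates that refactor shape with hypothetical stand-in types, not the real mono-service classes.

```java
import java.util.function.Supplier;

// Hypothetical stand-in for a relationship object whose toString() is costly to build.
record TokenRel(long tokenNum, long accountNum, long balance) {}

final class LazyErrorMessageDemo {
    // Before the change a helper like this returned an eagerly concatenated String;
    // after it, the helper returns a Supplier so concatenation happens only on failure.
    static Supplier<String> errorMessage(final String op, final long amount, final TokenRel rel) {
        return () -> "Cannot " + op + " " + amount + " units from " + rel;
    }

    static void validateTrue(final boolean flag, final Supplier<String> failureMsg) {
        if (!flag) {
            throw new IllegalStateException(failureMsg.get());
        }
    }

    public static void main(final String[] args) {
        final var rel = new TokenRel(1001L, 2002L, 50L);
        // The message Supplier is passed through untouched when the check passes.
        validateTrue(rel.balance() >= 10, errorMessage("burn", 10, rel));
        System.out.println("ok");
    }
}
```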
+import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; import static org.junit.jupiter.api.Assertions.assertArrayEquals; @@ -1053,8 +1054,10 @@ void getsNftOwnerChangeWithNoReceiverSigReqAndFallbackFeeNotTriggeredDueToHbar() final var summary = subject.keysForOtherParties(txn, summaryFactory); // then: - assertThat(summary.getOrderedKeys(), iterableWithSize(1)); - assertThat(sanityRestored(summary.getOrderedKeys()), contains(FIRST_TOKEN_SENDER_KT.asKey())); + assertThat(summary.getOrderedKeys(), iterableWithSize(2)); + assertThat( + sanityRestored(summary.getOrderedKeys()), + contains(FIRST_TOKEN_SENDER_KT.asKey(), SECOND_TOKEN_SENDER_KT.asKey())); } @Test @@ -1079,8 +1082,10 @@ void getsNftOwnerChangeWithNoReceiverSigReqAndFallbackFeeNotTriggeredDueToHbarWi final var summary = subject.keysForOtherParties(txn, summaryFactory, null, CUSTOM_PAYER_ACCOUNT); // then: - assertThat(summary.getOrderedKeys(), iterableWithSize(1)); - assertThat(sanityRestored(summary.getOrderedKeys()), contains(FIRST_TOKEN_SENDER_KT.asKey())); + assertThat(summary.getOrderedKeys(), iterableWithSize(2)); + assertThat( + sanityRestored(summary.getOrderedKeys()), + contains(FIRST_TOKEN_SENDER_KT.asKey(), SECOND_TOKEN_SENDER_KT.asKey())); } @Test @@ -1092,8 +1097,10 @@ void getsNftOwnerChangeWithNoReceiverSigReqAndFallbackFeeNotTriggeredDueToFt() t final var summary = subject.keysForOtherParties(txn, summaryFactory); // then: - assertThat(summary.getOrderedKeys(), iterableWithSize(1)); - assertThat(sanityRestored(summary.getOrderedKeys()), contains(FIRST_TOKEN_SENDER_KT.asKey())); + assertThat(summary.getOrderedKeys(), iterableWithSize(2)); + assertThat( + sanityRestored(summary.getOrderedKeys()), + containsInAnyOrder(FIRST_TOKEN_SENDER_KT.asKey(), SECOND_TOKEN_SENDER_KT.asKey())); } @Test @@ -1105,8 +1112,10 @@ void getsNftOwnerChangeWithNoReceiverSigReqAndFallbackFeeNotTriggeredDueToFtWith final var summary = subject.keysForOtherParties(txn, summaryFactory, null, CUSTOM_PAYER_ACCOUNT); // then: - assertThat(summary.getOrderedKeys(), iterableWithSize(1)); - assertThat(sanityRestored(summary.getOrderedKeys()), contains(FIRST_TOKEN_SENDER_KT.asKey())); + assertThat(summary.getOrderedKeys(), iterableWithSize(2)); + assertThat( + sanityRestored(summary.getOrderedKeys()), + containsInAnyOrder(FIRST_TOKEN_SENDER_KT.asKey(), SECOND_TOKEN_SENDER_KT.asKey())); } @Test diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleAccountStateSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleAccountStateSerdeTest.java index 9ce2d1a58965..aa6bebfb51af 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleAccountStateSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleAccountStateSerdeTest.java @@ -23,10 +23,12 @@ import static com.hedera.node.app.service.mono.state.merkle.MerkleAccountState.RELEASE_0320_VERSION; import com.hedera.node.app.service.mono.state.migration.AccountStateTranslator; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.serde.SerializedForms; import com.hedera.test.utils.SeededPropertySource; import com.swirlds.common.utility.CommonUtils; +import 
edu.umd.cs.findbugs.annotations.NonNull; public class MerkleAccountStateSerdeTest extends SelfSerializableDataTest { public static final int NUM_TEST_CASES = 2 * MIN_TEST_CASES_PER_VERSION; @@ -47,7 +49,8 @@ protected byte[] getSerializedForm(final int version, final int testCaseNo) { } @Override - protected MerkleAccountState getExpectedObject(final int version, final int testCaseNo) { + protected MerkleAccountState getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { final var propertySource = SeededPropertySource.forSerdeTest(version, testCaseNo); if (version == RELEASE_0230_VERSION) { return propertySource.next0242AccountState(); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleNetworkContextSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleNetworkContextSerdeTest.java index 171c232bcd7a..f446d3caa916 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleNetworkContextSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleNetworkContextSerdeTest.java @@ -16,9 +16,11 @@ package com.hedera.node.app.service.mono.state.merkle; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.serde.SerializedForms; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Optional; import java.util.function.BiConsumer; @@ -46,7 +48,8 @@ protected byte[] getSerializedForm(final int version, final int testCaseNo) { } @Override - protected MerkleNetworkContext getExpectedObject(final int version, final int testCaseNo) { + protected MerkleNetworkContext getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { final var propertySource = SeededPropertySource.forSerdeTest(version, testCaseNo); if (version < MerkleNetworkContext.RELEASE_0310_VERSION) { if (version < MerkleNetworkContext.RELEASE_0300_VERSION) { diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleStakingInfoSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleStakingInfoSerdeTest.java index ef0b62d05dd4..44ae4973460f 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleStakingInfoSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleStakingInfoSerdeTest.java @@ -16,8 +16,10 @@ package com.hedera.node.app.service.mono.state.merkle; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; public class MerkleStakingInfoSerdeTest extends SelfSerializableDataTest { public static final int NUM_TEST_CASES = 2 * MIN_TEST_CASES_PER_VERSION; @@ -38,7 +40,8 @@ protected MerkleStakingInfo getExpectedObject(SeededPropertySource propertySourc } @Override - protected MerkleStakingInfo getExpectedObject(final int version, final int testCaseNo) { + protected MerkleStakingInfo getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { final var propertySource = 
SeededPropertySource.forSerdeTest(version, testCaseNo); return version < MerkleStakingInfo.RELEASE_0371_VERSION ? propertySource.next0370StakingInfo() diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenRelStatusSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenRelStatusSerdeTest.java index 173f81701f80..8a92244a5176 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenRelStatusSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenRelStatusSerdeTest.java @@ -16,8 +16,10 @@ package com.hedera.node.app.service.mono.state.merkle; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; public class MerkleTokenRelStatusSerdeTest extends SelfSerializableDataTest { @Override @@ -31,7 +33,8 @@ protected MerkleTokenRelStatus getExpectedObject(final SeededPropertySource prop } @Override - protected MerkleTokenRelStatus getExpectedObject(final int version, final int testCaseNo) { + protected MerkleTokenRelStatus getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { var expected = super.getExpectedObject(version, testCaseNo); if (version < MerkleTokenRelStatus.RELEASE_0250_VERSION) { expected.setNext(0); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenSerdeTest.java index 27f103a2ea76..0f8603a626bd 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleTokenSerdeTest.java @@ -16,8 +16,11 @@ package com.hedera.node.app.service.mono.state.merkle; +import com.hedera.node.app.service.mono.state.migration.TokenStateTranslator; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; public class MerkleTokenSerdeTest extends SelfSerializableDataTest { public static final int NUM_TEST_CASES = 2 * MIN_TEST_CASES_PER_VERSION; @@ -33,7 +36,15 @@ protected int getNumTestCasesFor(int version) { } @Override - protected MerkleToken getExpectedObject(final SeededPropertySource propertySource) { + protected MerkleToken getExpectedObject( + final SeededPropertySource propertySource, @NonNull final EqualityType equalityType) { + final var strictExpectation = getExpectedObject(propertySource); + final var pbjToken = TokenStateTranslator.tokenFromMerkle(strictExpectation); + return TokenStateTranslator.merkleTokenFromToken(pbjToken); + } + + @Override + protected MerkleToken getExpectedObject(@NonNull final SeededPropertySource propertySource) { return propertySource.nextToken(); } } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleUniqueTokenSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleUniqueTokenSerdeTest.java index 
c46e24f596b6..65be2949a682 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleUniqueTokenSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/merkle/MerkleUniqueTokenSerdeTest.java @@ -21,8 +21,10 @@ import com.hedera.node.app.service.mono.state.submerkle.EntityId; import com.hedera.node.app.service.mono.utils.NftNumPair; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; public class MerkleUniqueTokenSerdeTest extends SelfSerializableDataTest { public static final int NUM_TEST_CASES = 2 * MIN_TEST_CASES_PER_VERSION; @@ -38,7 +40,8 @@ protected MerkleUniqueToken getExpectedObject(final SeededPropertySource propert } @Override - protected MerkleUniqueToken getExpectedObject(final int version, final int testCaseNo) { + protected MerkleUniqueToken getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { final var propertySource = SeededPropertySource.forSerdeTest(version, testCaseNo); final var seededObject = getExpectedObject(propertySource); if (version <= RELEASE_0180_VERSION) { diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapterTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapterTest.java index 2134fc9d7701..6b28cbd1d8f2 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapterTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/AccountStorageAdapterTest.java @@ -40,6 +40,7 @@ import com.swirlds.common.crypto.Hash; import com.swirlds.common.threading.interrupt.InterruptableConsumer; import com.swirlds.merkle.map.MerkleMap; +import java.util.HashSet; import java.util.Set; import java.util.function.BiConsumer; import org.apache.commons.lang3.tuple.Pair; @@ -215,6 +216,36 @@ void onDiskPropagatesInterruption() throws InterruptedException { assertThrows(IllegalStateException.class, () -> subject.forEach(visitor)); } + @Test + @SuppressWarnings("unchecked") + void onDiskForEachParallel() throws InterruptedException { + withOnDiskSubject(); + willAnswer(invocation -> { + final var observer = invocation.getArgument(1, InterruptableConsumer.class); + observer.accept(Pair.of(SOME_KEY, onDiskStandIn)); + observer.accept(Pair.of(EntityNumVirtualKey.from(SOME_OTHER_NUM), onDiskStandIn)); + observer.accept(Pair.of(EntityNumVirtualKey.from(YET_ANOTHER_NUM), onDiskStandIn)); + return null; + }) + .given(onDiskAccounts) + .extractVirtualMapDataC(eq(getStaticThreadManager()), any(InterruptableConsumer.class), eq(32)); + + final var actual = new HashSet<>(); + subject.forEachParallel((num, account) -> actual.add(num)); + + assertEquals(SOME_ON_DISK_KEY_SET, actual); + } + + @Test + @SuppressWarnings("unchecked") + void onDiskPropagatesInterruptionC() throws InterruptedException { + withOnDiskSubject(); + willThrow(InterruptedException.class) + .given(onDiskAccounts) + .extractVirtualMapDataC(eq(getStaticThreadManager()), any(InterruptableConsumer.class), eq(32)); + assertThrows(IllegalStateException.class, () -> subject.forEachParallel(visitor)); + } + private void withInMemorySubject() { subject = 
AccountStorageAdapter.fromInMemory(MerkleMapLike.from(inMemoryAccounts)); } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/EvmFnResultSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/EvmFnResultSerdeTest.java index 03e34c270a05..97cf2d5ae879 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/EvmFnResultSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/EvmFnResultSerdeTest.java @@ -16,9 +16,11 @@ package com.hedera.node.app.service.mono.state.submerkle; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.utils.SeededPropertySource; import java.util.Collections; +import edu.umd.cs.findbugs.annotations.NonNull; public class EvmFnResultSerdeTest extends SelfSerializableDataTest { public static final int NUM_TEST_CASES = 2 * MIN_TEST_CASES_PER_VERSION; @@ -34,7 +36,8 @@ protected int getNumTestCasesFor(final int version) { } @Override - protected EvmFnResult getExpectedObject(final int version, final int testCaseNo) { + protected EvmFnResult getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { final var seeded = SeededPropertySource.forSerdeTest(version, testCaseNo).nextEvmResult(); if (version < EvmFnResult.RELEASE_0250_VERSION) { diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/ExpirableTxnRecordSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/ExpirableTxnRecordSerdeTest.java index ce06b40e0963..37780c4c708b 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/ExpirableTxnRecordSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/ExpirableTxnRecordSerdeTest.java @@ -21,9 +21,11 @@ import static com.hedera.node.app.service.mono.state.submerkle.ExpirableTxnRecord.RELEASE_0280_VERSION; import static com.hedera.node.app.service.mono.state.submerkle.ExpirableTxnRecord.RELEASE_0340_VERSION; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.serde.SerializedForms; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; public class ExpirableTxnRecordSerdeTest extends SelfSerializableDataTest { public static final int NUM_TEST_CASES = 4 * MIN_TEST_CASES_PER_VERSION; @@ -44,7 +46,8 @@ protected byte[] getSerializedForm(final int version, final int testCaseNo) { } @Override - protected ExpirableTxnRecord getExpectedObject(final int version, final int testCaseNo) { + protected ExpirableTxnRecord getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { final var seeded = SeededPropertySource.forSerdeTest(version, testCaseNo).nextRecord(); if (version < RELEASE_0260_VERSION) { diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcAssessedCustomFeeSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcAssessedCustomFeeSerdeTest.java index cffae0f89b4c..b63d190e6f80 100644 --- 
a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcAssessedCustomFeeSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcAssessedCustomFeeSerdeTest.java @@ -16,8 +16,10 @@ package com.hedera.node.app.service.mono.state.submerkle; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; public class FcAssessedCustomFeeSerdeTest extends SelfSerializableDataTest { @Override @@ -31,7 +33,8 @@ protected FcAssessedCustomFee getExpectedObject(final SeededPropertySource prope } @Override - protected FcAssessedCustomFee getExpectedObject(final int version, final int testCaseNo) { + protected FcAssessedCustomFee getExpectedObject( + final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { final var result = super.getExpectedObject(version, testCaseNo); if (version < FcAssessedCustomFee.RELEASE_0171_VERSION) { // Need to drop the last field. diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcCustomFeeSerdeTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcCustomFeeSerdeTest.java index 08c0ef7ad6b4..73ee6b00704c 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcCustomFeeSerdeTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/submerkle/FcCustomFeeSerdeTest.java @@ -16,8 +16,11 @@ package com.hedera.node.app.service.mono.state.submerkle; +import com.hedera.node.app.service.mono.pbj.PbjConverter; +import com.hedera.test.serde.EqualityType; import com.hedera.test.serde.SelfSerializableDataTest; import com.hedera.test.utils.SeededPropertySource; +import edu.umd.cs.findbugs.annotations.NonNull; public class FcCustomFeeSerdeTest extends SelfSerializableDataTest { @Override @@ -31,13 +34,15 @@ protected FcCustomFee getExpectedObject(final SeededPropertySource propertySourc } @Override - protected FcCustomFee getExpectedObject(int version, int testCaseNo) { + protected FcCustomFee getExpectedObject(int version, int testCaseNo, @NonNull final EqualityType equalityType) { final var propertySource = SeededPropertySource.forSerdeTest(version, testCaseNo); final var nextFee = propertySource.nextCustomFee(); if (version < FcCustomFee.RELEASE_0310_VERSION) { nextFee.setAllCollectorsAreExempt(false); } - - return nextFee; + final var seededFee = nextFee; + final var pbjFee = PbjConverter.fromFcCustomFee(seededFee); + final var merkleFcCustomFee = FcCustomFee.fromGrpc(PbjConverter.fromPbj(pbjFee)); + return merkleFcCustomFee; } } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/UtilsConstructorTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/UtilsConstructorTest.java index ba1227efc072..a1db843b6cc6 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/UtilsConstructorTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/UtilsConstructorTest.java @@ -58,6 +58,7 @@ import com.hedera.node.app.service.mono.state.migration.StakingInfoMapBuilder; import com.hedera.node.app.service.mono.state.migration.StateChildIndices; import 
com.hedera.node.app.service.mono.state.migration.StateVersions; +import com.hedera.node.app.service.mono.state.migration.TokenStateTranslator; import com.hedera.node.app.service.mono.state.serdes.IoUtils; import com.hedera.node.app.service.mono.state.virtual.IterableStorageUtils; import com.hedera.node.app.service.mono.state.virtual.KeyPackingUtils; @@ -99,6 +100,7 @@ class UtilsConstructorTest { private static final Set<Class<?>> toBeTested = new HashSet<>(Arrays.asList( EntityIoUtils.class, + TokenStateTranslator.class, OrderedComparison.class, RecordParsers.class, Units.class, diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/EqualityType.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/EqualityType.java new file mode 100644 index 000000000000..677872e2a463 --- /dev/null +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/EqualityType.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.test.serde; + +/** + * Used by the base {@link SelfSerializableDataTest} to signal to expected object providers what type of + * equality test will be used. + * + * <p>

We added this so that when {@code OBJECT_EQUALITY} is being used, we can insert an extra step of + * converting the expected object to-and-from the corresponding PBJ type before returning it. + * (This is an efficient way to confirm our bidirectional converters are not changing object semantics.) + */ +public enum EqualityType { + /** + * A test is using object equality to compare expected and actual objects. + */ + OBJECT_EQUALITY, + /** + * A test is using serialized bytes equality to compare expected and actual objects. + */ + SERIALIZED_EQUALITY +} diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SelfSerializableDataTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SelfSerializableDataTest.java index e8d33a2b34c5..277d805fb780 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SelfSerializableDataTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SelfSerializableDataTest.java @@ -16,6 +16,8 @@ package com.hedera.test.serde; +import static com.hedera.test.serde.EqualityType.OBJECT_EQUALITY; +import static com.hedera.test.serde.EqualityType.SERIALIZED_EQUALITY; import static com.hedera.test.serde.SerializedForms.assertSameSerialization; import static com.hedera.test.utils.SerdeUtils.deserializeFromBytes; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -33,6 +35,7 @@ import com.swirlds.common.io.SerializableDet; import com.swirlds.common.io.Versioned; import com.swirlds.common.io.streams.SerializableDataInputStream; +import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.lang.reflect.Array; import java.lang.reflect.Constructor; @@ -127,6 +130,18 @@ protected byte[] getSerializedForm(final int version, final int testCaseNo) { return SerializedForms.loadForm(getType(), version, testCaseNo); } + /** + * Returns the expected object created with a given version for a given test case. + * + * @param version the parent version that created the expected object + * @param testCaseNo the zero-indexed number of test case for this version + * @param equalityType the type of equality test the expected object will be subject to + * @return the expected object + */ + protected T getExpectedObject(final int version, final int testCaseNo, @NonNull final EqualityType equalityType) { + return getExpectedObject(SeededPropertySource.forSerdeTest(version, testCaseNo)); + } + /** * Returns the expected object created with a given version for a given test case. * @@ -138,6 +153,17 @@ protected T getExpectedObject(final int version, final int testCaseNo) { return getExpectedObject(SeededPropertySource.forSerdeTest(version, testCaseNo)); } + /** + * Returns the expected object created with a given seeded property source. + * + * @param propertySource the property source to use + * @param equalityType the type of equality test the expected object will be subject to + * @return the expected object + */ + protected T getExpectedObject(final SeededPropertySource propertySource, @NonNull final EqualityType equalityType) { + return getExpectedObject(propertySource); + } + /** * Returns the expected object created with a given seeded property source. 
* @@ -160,7 +186,7 @@ static void setUpClass() { @ArgumentsSource(SupportedVersionsArgumentsProvider.class) void deserializationWorksForAllSupportedVersions(final int version, final int testCaseNo) { final var serializedForm = getSerializedForm(version, testCaseNo); - final var expectedObject = getExpectedObject(version, testCaseNo); + final var expectedObject = getExpectedObject(version, testCaseNo, OBJECT_EQUALITY); final T actualObject = deserializeFromBytes(() -> instantiate(getType()), version, serializedForm); @@ -173,7 +199,11 @@ void deserializationWorksForAllSupportedVersions(final int version, final int te @ParameterizedTest @ArgumentsSource(CurrentVersionArgumentsProvider.class) void serializationHasNoRegressionWithCurrentVersion(final int version, final int testCaseNo) { - assertSameSerialization(getType(), this::getExpectedObject, version, testCaseNo); + assertSameSerialization( + getType(), + propertySource -> getExpectedObject(propertySource, SERIALIZED_EQUALITY), + version, + testCaseNo); } @ParameterizedTest diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java index 3a1fbf44d794..898308820add 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java @@ -152,7 +152,7 @@ private static void assertSameCopySerialization( } private static void generateSerializedData() { - GENERATOR_MAPPING.get(EvmFnResult.class).run(); + GENERATOR_MAPPING.get(MerkleToken.class).run(); // for (var entry : GENERATOR_MAPPING.entrySet()) { // entry.getValue().run(); // } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/VirtualValueDataTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/VirtualValueDataTest.java index 697186763a96..03df25d48a07 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/VirtualValueDataTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/VirtualValueDataTest.java @@ -16,6 +16,8 @@ package com.hedera.test.serde; +import static com.hedera.test.serde.EqualityType.OBJECT_EQUALITY; +import static com.hedera.test.serde.EqualityType.SERIALIZED_EQUALITY; import static com.hedera.test.serde.SerializedForms.assertSameBufferSerialization; import static com.hedera.test.utils.SerdeUtils.deserializeFromBuffer; import static org.junit.jupiter.api.Assertions.*; @@ -37,14 +39,18 @@ public abstract class VirtualValueDataTest extends SelfS @ParameterizedTest @ArgumentsSource(CurrentVersionArgumentsProvider.class) void bufferSerializationHasNoRegressionWithCurrentVersion(final int version, final int testCaseNo) { - assertSameBufferSerialization(getType(), this::getExpectedObject, version, testCaseNo); + assertSameBufferSerialization( + getType(), + propertySource -> this.getExpectedObject(propertySource, SERIALIZED_EQUALITY), + version, + testCaseNo); } @ParameterizedTest @ArgumentsSource(SupportedVersionsArgumentsProvider.class) void bufferDeserializationWorksForAllSupportedVersions(final int version, final int testCaseNo) { final var serializedForm = getSerializedForm(version, testCaseNo); - final var expectedObject = getExpectedObject(version, testCaseNo); + final var expectedObject = getExpectedObject(version, testCaseNo, OBJECT_EQUALITY); final T actualObject = deserializeFromBuffer(() -> 
instantiate(getType()), version, serializedForm); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/utils/SeededPropertySource.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/utils/SeededPropertySource.java index 622689392535..64cb756c1d4e 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/utils/SeededPropertySource.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/utils/SeededPropertySource.java @@ -164,7 +164,7 @@ public MerkleToken nextToken() { nextString(48), nextBoolean(), nextBoolean(), - nextEntityId(), + nextZeroShardZeroRealmEntityId(), nextInt()); seeded.setMemo(nextString(36)); seeded.setDeleted(nextBoolean()); @@ -183,6 +183,7 @@ public MerkleToken nextToken() { seeded.setFeeScheduleKey(nextNullableKey()); seeded.setPauseKey(nextNullableKey()); seeded.setKey(nextNum()); + seeded.setAutoRenewAccount(nextZeroShardZeroRealmEntityId()); return seeded; } @@ -1114,6 +1115,17 @@ public ContractNonceInfo nextContractNonce() { return new ContractNonceInfo(nextEntityId(), nextUnsignedLong()); } + /** + * Return an entity id with the default shard and realm of {@code 0} so that when using + * PBJ converters inside {@link com.hedera.test.serde.SelfSerializableDataTest} object + * providers, we don't lose information. + * + * @return an entity id in the default shard and realm + */ + public EntityId nextZeroShardZeroRealmEntityId() { + return new EntityId(0, 0, nextUnsignedLong()); + } + public RichInstant nextRichInstant() { return new RichInstant(nextUnsignedLong(), SEEDED_RANDOM.nextInt(1_000_000)); } diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn0.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn0.hex index bdada62c4f42..124eab460d1f 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn0.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn0.hex @@ -1 +1 @@ 
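Taken together, the serde-test changes above follow a single pattern: when the base test asks a provider for an expected object, the provider can round-trip the seeded object through its PBJ representation before returning it, so that an OBJECT_EQUALITY comparison also exercises the to/from-PBJ converters, and any entity ids it seeds stay in shard 0 and realm 0 so the round trip is lossless. A condensed, illustrative sketch of such a provider override (it mirrors the FcCustomFeeSerdeTest change above and is not itself part of the patch):

    @Override
    protected FcCustomFee getExpectedObject(
            final int version, final int testCaseNo, @NonNull final EqualityType equalityType) {
        // Seed the expected object exactly as the two-argument provider used to
        final var seeded = SeededPropertySource.forSerdeTest(version, testCaseNo).nextCustomFee();
        if (version < FcCustomFee.RELEASE_0310_VERSION) {
            seeded.setAllCollectorsAreExempt(false);
        }
        // Round-trip through the PBJ CustomFee; an object-equality comparison now fails
        // if either direction of the converter changes the object's semantics
        return FcCustomFee.fromGrpc(PbjConverter.fromPbj(PbjConverter.fromFcCustomFee(seeded)));
    }

The MerkleToken-v5-sn*.hex resources that follow are the serialized forms regenerated for the updated MerkleToken provider (note the SerializedForms change above switching the generator to MerkleToken.class).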
-004a69dc08fd23a87d007eae587a36f1960700000018384a6d116c5a166f5f65256a3f2033582a1c2e48481978050000003047642b3a7a06283e240c76427e4f34695a3d483965593846390f3a215b5611505231176a1c313c3578754a1d4c0a7371f35ba643324efa37000000017a647082b12f216c22dc42fed6983e634ba5d74cfd91d6d0000000000249f8e40b1d100c0000000100000000000000020000000000ecf2ea0000000000000020391869184becff3b7620abce5e919819282d07142a287ba5913a610d2d65dd3f000100000000000000020000000000eefa560000000000000021f3e9b6b84596b3f6cb2fe056e87f5a4f78923792ce9accce4ee2deee0a12f5c29c0100000000000000020000000000ecb1f000000000000001c30000000200000000000000020000000000eefa560000000000000021c4f2f1bf8524a95fb0963cf6cd163d7d5bcd4cfbd52d1efa443bbcb33b7b441e1c00000000000000020000000000ecb1f0000000000000016e0000000200000000000000020000000000eefa560000000000000021a5d43cb8a6899953a8812c129306fa047dd2deb80e25ed99f21803e13ff70349e000000000000000020000000000ecb1f000000000000001190000000200000000000000020000000000ed33e4000000000000001870f7f1a54d64f7dd553d0b93429d72116c8ac1d4eccce77900000000000000020000000000ecb1f000000000000000cd0000000200000000000000020000000000ef843a00000000000000247ec560510ec048490d7ba024150f14c3a200dc91182682e9bd503726974db301afc29a8600000000000000020000000000ecb1f000000000000000750000000200000000000000020000000000eefa5600000000000000216604e6db6752c56ce9b1b10d86340ca22be73aab8c330dedd6cf7a372411364a5900000000000000020000000000ecf2ea00000000000000205eb7763d27924586c10d9ee0ce4814e1c320f27eaa42ec7915af4d09679669960000002436137c1d04716c43704f1208062a626249583e31684067662d113f3937360308201f507000000001000000000000000002f98c6b0cea42edbd2d13e7000000040100f65baa433940f13700000003040000000000000007000000000000004a00f35ba643324efa370000000174794dcc7c94188d1705bf4394e7cd2f60201eea95de0f4a0000010ba14acec87c09faf35ba643324efa370000000121fb91ed16a51aff22f1a9a94ee7cf36769a74f6bfb04e0ef35ba643324efa370000000163c6306dc26ff48e253c1d92aaa9ae1c259f5ac488fdd2cf010002000000000000000400000000000000090000000000000062000000000000006900f35ba643324efa370000000178b3197cc14a8f9e24b90e4d881803641f0aec34ee68c3cc0000020000000000000007000000000000002f000000000000001f000000000000005b00f35ba643324efa37000000010014770af2acbf1252116904e93908321545c3bbd319797701000aa917460001 \ No newline at end of file 
+004a69dc08fd23a87d01f35ba643324efa3700000001000000000000000000000000000000003bba64930a71141e4b0552b91357c0ac00000018384a6d116c5a166f5f65256a3f2033582a1c2e48481978050000003047642b3a7a06283e240c76427e4f34695a3d483965593846390f3a215b5611505231176a1c313c3578754a1d4c0a7371f35ba643324efa3700000001000000000000000000000000000000007a647082b12f216c000000000249f8e40b1d100c000000000100000000000000020000000000ecb1f000000000000001c30000000200000000000000020000000000eefa560000000000000021c4f2f1bf8524a95fb0963cf6cd163d7d5bcd4cfbd52d1efa443bbcb33b7b441e1c00000000000000020000000000ecb1f0000000000000016e0000000200000000000000020000000000eefa560000000000000021a5d43cb8a6899953a8812c129306fa047dd2deb80e25ed99f21803e13ff70349e000000000000000020000000000ecb1f000000000000001190000000200000000000000020000000000ed33e4000000000000001870f7f1a54d64f7dd553d0b93429d72116c8ac1d4eccce77900000000000000020000000000ecb1f000000000000000cd0000000200000000000000020000000000ef843a00000000000000247ec560510ec048490d7ba024150f14c3a200dc91182682e9bd503726974db301afc29a8600000000000000020000000000ecb1f000000000000000750000000200000000000000020000000000eefa5600000000000000216604e6db6752c56ce9b1b10d86340ca22be73aab8c330dedd6cf7a372411364a5900000000000000020000000000ecf2ea00000000000000205eb7763d27924586c10d9ee0ce4814e1c320f27eaa42ec7915af4d0967966996000100000000000000020000000000eefa560000000000000021f3e9b6b84596b3f6cb2fe056e87f5a4f78923792ce9accce4ee2deee0a12f5c29c00000024014736137c1d04716c43704f1208062a626249583e31684067662d113f3937360308201f00000001000000000000000007e80f1f1dfff6258c348c1c000000050100f65baa433940f13700000003040000000000000004000000000000000600f35ba643324efa37000000010611a4517c6736320d652748c855d96174794dcc7c94188d01000200000000000000140000000000000030000000000000002d000000000000007c00f35ba643324efa3700000001769a74f6bfb04e0e63c6306dc26ff48e253c1d92aaa9ae1c000002000000000000001200000000000000630000000000000006000000000000006800f35ba643324efa3700000001567733ea4cc9db8d78b3197cc14a8f9e24b90e4d88180364010001275ae57fb75e95d2f35ba643324efa370000000158dbe5775d1d5b3712cad964a83e85883075b2079c71e3a9f35ba643324efa370000000151b4ab7ac042d2332d8c76906bda62170014770af2acbf12010004000000000000000e000000000000005900f35ba643324efa37000000017e6bfac185abe6c30cea42edbd2d13e7284d41e782cc28870100cc988a860100000000000000020000000000eefa5600000000000000214737d309367ecc8a5e5115fc41c1f8e6301884b7f1a5b513c3e7d3d661c1d7fe9301 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn1.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn1.hex index 3945093f8a29..87ef04695b48 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn1.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn1.hex @@ -1 +1 @@ 
-010c624c9d1c9c6de10043ee9a556d46aca1000000185c1421743d6a25502f052d1d1e1a2b67205428676c041518000000301f131454073872774059617d2975546d5f11317d2d26385c7159020668161e3f4a0553343261680d7e6838204809335af35ba643324efa370000000104ab775a8676745055548080827bdaba3d936527f978a3680000000002477a637649afcb0100000100000000000000020000000000ecb1f000000000000000c50000000200000000000000020000000000eefa56000000000000002141bf91fcfde7d9d23804b3be7328a03b54e3839c93fb22049010c601954a80863b00000000000000020000000000ecb1f000000000000000700000000200000000000000020000000000ef843a0000000000000024367fbd9e40dcbbb3341859894376ff8cf38345f3c2bc4a7088919d01e1bf321263fa25c400000000000000020000000000ed33e400000000000000185e35fcc05d078c2650c9a3305bf782781f96472e9b740c410100000000000000020000000000ecf2ea0000000000000020e918c7ae45a2879fa107237dae3eb0ff8d8fc9aa71072096075502255692a9cd0100000000000000020000000000ef843a00000000000000247d212a7e03a5ac00684f4a20bf8cf6ec6a90d92b24675e1f75b6b59743440e185fdeba3c0100000000000000020000000000eefa560000000000000021faeafab176ae35ae537f805e94a5fd029f7d79f51efebd1178fd52cbb0c75a6640000000246d0d16117a1d2170082e7e066a2b2c197b145a38444a7d1d3a30681e4212351d204c1f4600000000000000000000000003f896d84bd1f134fea79495000000060100f65baa433940f13700000003016af7fd9a584fcca1f35ba643324efa370000000157338057bc7c5bbb56b82335a5b9d9dc325f9b7802baa711f35ba643324efa370000000151e178ca7b3d5db370da038bfaf3fb797034a9f008dfc2b00100020000000000000043000000000000005c0000000000000029000000000000007701f35ba643324efa37000000014c142aa63dd4a4946aa13f8366c2cc8b676d735e661bb7e5000002000000000000000200000000000000250000000000000057000000000000007d00f35ba643324efa370000000140e9563e5f3b083478ca3939e4c1d9d275dca369d2730f20010002000000000000000d00000000000000150000000000000022000000000000005d01f35ba643324efa37000000015a713141ae2e59257fd9b5bcc1d748196ac87c4419952d090000012c937e71d7392324f35ba643324efa37000000017c12e9459bf245b812c3c9e0d9f448c07fb4e0c926140b1ef35ba643324efa37000000013d6aba5ce80bf70f79f7937cf886c5d9502c8f689514f14d000004000000000000004c000000000000005000f35ba643324efa37000000011fcdb4924ba44cc0224a24ced29eeefa2697a890e4940343010100000000000000020000000000eefa5600000000000000212867aa1ff3b57cc1d6a805edc8fe259f9db65f3825d8bf0ba9c3c4a8ab46b89bfcfc8715fe0100000000000000020000000000ef843a00000000000000247419483a16c6458e4189f9de7ff4369a9e9f4f4d7a0174152ace122be4ac59270af4408201 \ No newline at end of file 
+000c624c9d1c9c6de101f35ba643324efa3700000001000000000000000000000000000000007702013e98caa6857ce5a15f1bb17675000000185c1421743d6a25502f052d1d1e1a2b67205428676c041518000000301f131454073872774059617d2975546d5f11317d2d26385c7159020668161e3f4a0553343261680d7e6838204809335af35ba643324efa37000000010000000000000000000000000000000004ab775a867674500000000002477a637649afcb0100000100000000000000020000000000ecf2ea00000000000000209010c601954a80863bc97939d62a9634f874703f0f9a9c0ec9c443ec40e70559000100000000000000020000000000ef843a000000000000002473cd1f0fa2983d81072da726792c879e537f805e94a5fd029f7d79f51efebd1178fd52cb0100000000000000020000000000ecb1f000000000000000750000000200000000000000020000000000eefa56000000000000002188919d01e1bf321263fa25c4a6a1200a1a28df4c64c3f00e4d180fba80f96bbcf100000000000000020000000000ecf2ea0000000000000020721698bce988b6040660ed4cb35f160ae918c7ae45a2879fa107237dae3eb0ff0000002448306d0d16117a1d2170082e7e066a2b2c197b145a38444a7d1d3a30681e4212351d204c00000001000000010000000005b0193a2711ebd8e0e5cfc2000000070100f65baa433940f13700000003040000000000000007000000000000004400f35ba643324efa370000000157338057bc7c5bbb56b82335a5b9d9dc325f9b7802baa7110000040000000000000005000000000000000c00f35ba643324efa3700000001166e62d9e141c3b125e7f35632378f460f291d4a98493ee40000014c142aa63dd4a494f35ba643324efa37000000016aa13f8366c2cc8b676d735e661bb7e52c2d429b249f6a93f35ba643324efa37000000017dbb97cbd1444d2852709ec36ff3ffeb2d830764b6f8adf0010002000000000000000200000000000000540000000000000049000000000000007401f35ba643324efa37000000017a9b888271c8a97c5b2d7effac47048a70a023303c74f481010002000000000000002300000000000000460000000000000042000000000000009100f35ba643324efa37000000012a1a8b21f171406e2c937e71d73923247c12e9459bf245b80100013d6aba5ce80bf70ff35ba643324efa370000000179f7937cf886c5d9502c8f689514f14d27a3e7a7e5318f0ef35ba643324efa37000000011e38cb21ed1462593b1b050c2663cdb5356f8b1df535a357010004000000000000001d000000000000003400f35ba643324efa370000000143ee9a556d46aca16537bd809c44b3251097241a413e561a0100337e9b810100000000000000020000000000ef843a00000000000000247d212a7e03a5ac00684f4a20bf8cf6ec6a90d92b24675e1f75b6b59743440e185fdeba3c01 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn2.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn2.hex index 2f67ab116bd2..3292f997f981 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn2.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn2.hex @@ -1 +1 @@ 
-0143e0ce337a5f8b7a005976f93aceacd03e0000001852384a657b273477504c39581160106d4e5c08787c16333d000000307a5b6b21046b6a5c050a0f174f707b776a4349457d415479197048437241086a690b7d655d5864167e11415c550c2140f35ba643324efa37000000013cb1dc6d1f4b170c1021f7a692efd9046a2f2c45df8d30010000000000fb5034767f2d4301000100000000000000020000000000eefa56000000000000002109327272bfa85407f79a4774c58fe83aa7d49b9a091eeb2a8e65dc4ba89ac3f59f00000100000000000000020000000000ef843a00000000000000243f7ed588fc966c6d2ac95805bfd2578f488d209d6a38c1e70168fcbb4e707a77648fb2e60100000000000000020000000000eefa560000000000000021525ee6c2c8e4058219c56689f967d0ad4a1584a6077e690acfeea6b1d77ed001c7000000242a400e167559317a041b4b0a3240056d2c1f012c6b5c26222e08086a083e444a165c5028000000010000000100000000049000203b3fdff38a36336e000000070100f65baa433940f13700000003040000000000000003000000000000001b00f35ba643324efa370000000146f769fb5cb47a1c11b0ac7253a9b0db4e62b9ff6bff4da50000040000000000000020000000000000004400f35ba643324efa3700000001062b2be3a29009a10710e1848f2287da72a8e9c17cb4991c0000040000000000000029000000000000003d00f35ba643324efa37000000010518dd446a4f26eb2e714e11289837502823a23c1c92dfd900000200000000000000120000000000000021000000000000003d000000000000009100f35ba643324efa37000000011db9ff36aa6aa0f60263d1baf2f37f06416a45b222f6711500000400000000000000090000000000000033010000000000046af8f35ba643324efa3700000001591b60ea4d41b29341bbb14766a6d8f74370e69e903d55abf35ba643324efa3700000001626d67f9a8c26074144b3d803862df9f77bd759e1d2de6360100011f8febc1c810efe1f35ba643324efa37000000015473f56ef434cbe1372cae0e44e11d22698e0807dd8fefadf35ba643324efa37000000015d92ee7827f68e1a6bad09f7694560fe68c1bebd7122b20300000169455285a2e38d68f35ba643324efa370000000113aa7ffd7bcfb2f82c1919f3bc0facde39157d0579f66b46f35ba643324efa37000000010a170486bedaa13c6cada4fdf756bbc97136c2f0707bfed8000066f5c6ee0100000000000000020000000000ef843a000000000000002474f83dd866af5a5747fe4110bbd0e4c8c0b78cf5b08b453c9df86672f4cc1271252c11c500 \ No newline at end of file +0043e0ce337a5f8b7a01f35ba643324efa3700000001000000000000000000000000000000001fe40573d3df6a325da0cb1d40c43adb0000001852384a657b273477504c39581160106d4e5c08787c16333d000000307a5b6b21046b6a5c050a0f174f707b776a4349457d415479197048437241086a690b7d655d5864167e11415c550c2140f35ba643324efa3700000001000000000000000000000000000000003cb1dc6d1f4b170c0000000000fb5034767f2d4301000100000000000000020000000000eefa5600000000000000214a9bfed7fe73c59ccc42063171b6a579e03e637c98845a80460caa66f7d158c7fd000000000000002451042a400e167559317a041b4b0a3240056d2c1f012c6b5c26222e08086a083e444a165c00000001000000010000000001a393ca4eed358de0c48721000000000100000000000000020000000000ecf2ea00000000000000206a85d83d939f8ddcc6ad53f64963d44f38f87fa3e68720f6d64d9ed488ba310a44b682150001 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn3.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn3.hex index 3c27d1c73ca7..347978fcbb67 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn3.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn3.hex @@ -1 +1 @@ 
-003c2e13936a0739fc0035225aadca257bd2000000183943665362392360521f6b2c1722006005201677685a5366000000301b57196a3e43507a510764172331776f52322c404152196e53461402203d43004d450c643f0f5371267768485b651c60f35ba643324efa37000000012af5e2634e508003683cf24ec43ff2326db653437f7e2fcf0000000003afb2984a5add7e00000100000000000000020000000000eefa5600000000000000218346eeaea086679bc9cd032ad259ae79182de668a201bda3fbdb7ae27c120d0a180100000000000000020000000000ef843a00000000000000243f604ebc5940b9f56ccd408405afaffefbca69a3673c178b6ad8a11b8ff1eed4fed3122000000100000000000000020000000000ecb1f000000000000000750000000200000000000000020000000000eefa560000000000000021dfb7e35492eb83eccbdc128bf435a1247a099ce32182f387cd22db2eca1cb433a500000000000000020000000000ecf2ea0000000000000020f509d6c0ecc21b3c7addcc2aba5890a6b1bb96e6c79a1e86af073e99a397d8be000000243d3a70234108652433483422710a0a0c67443a75382746163b573978602a40544a31623d00000001000000010000000004f48f1756ed05f07c870772000000030100f65baa433940f137000000030200000000000000040000000000000030000000000000004a000000000000006601f35ba643324efa37000000010fe544bb5f66de09699da8fd332b5ea845fedd1cb24c970400000171030dae545dbcf6f35ba643324efa37000000017bc0e83c3e8e128a61e83eea8c6c7b0228629d252238494ef35ba643324efa370000000128ff8b34db1cd49b6ce6ed2bd8dd0fab54c3bd80910dc327000002000000000000000500000000000000360000000000000027000000000000007f01f35ba643324efa3700000001302ceb068e8b8d3a62f66124e878fe1d6199ad8c5ddaaf300000a50b00d10001 \ No newline at end of file +003c2e13936a0739fc01f35ba643324efa37000000010000000000000000000000000000000030dd592e7ab378a46a7778c78dd0ec35000000183943665362392360521f6b2c1722006005201677685a5366000000301b57196a3e43507a510764172331776f52322c404152196e53461402203d43004d450c643f0f5371267768485b651c60f35ba643324efa3700000001000000000000000000000000000000002af5e2634e5080030000000003afb2984a5add7e00000100000000000000020000000000ef843a000000000000002443f9c110f1ce04bd19da0e65176d916645cc214a1c94a90ff509d6c0ecc21b3c7addcc2a0100000000000000020000000000ef843a000000000000002453fd71f315cbe9c84435cb32bc68d6644dbb086356460de7c8cf5b7a1dfdc9da3afc1b180100000000000000020000000000ed33e400000000000000187ebfd71b51d3333a515feb1c3aade16b55da68a7f4d425c7000100000000000000020000000000eefa560000000000000021266d226619647982b83fc711e0e3755e4a7acc5a6eebd883a4a909df2a505d79480000002405523d3a70234108652433483422710a0a0c67443a75382746163b573978602a40544a3100000000000000000000000007abecc72d6f1cac6da5e55f000000050100f65baa433940f1370000000302000000000000000a0000000000000019000000000000002f000000000000008c00f35ba643324efa37000000017cbf937872e4af4b5f3c3251f4976fc80fe544bb5f66de090000040000000000000044000000000000004f00f35ba643324efa37000000017bc0e83c3e8e128a61e83eea8c6c7b0228629d252238494e010004000000000000000e000000000000002301000000000003edadf35ba643324efa37000000012deb773f608544b907c3dfde5bc7eb9267517d78a6135955f35ba643324efa3700000001205fcd8fcfbdf7d0302ceb068e8b8d3a62f66124e878fe1d010002000000000000003000000000000000540000000000000051000000000000009e01f35ba643324efa3700000001727c149cd643a2a174fd69ebad9ef4b64f2354b63e2de05601000200000000000000080000000000000009000000000000005e00000000000000ab00f35ba643324efa37000000013f604ebc5940b9f56ccd408405afaffe02baafbc34b63b4500000c71d3920100000000000000020000000000ed33e400000000000000187741b125f34457580f0d47b4c1c43f2c0aeebd9200ae953700 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn4.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn4.hex index 
cb1f36add5dd..2ed3cab454c0 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn4.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn4.hex @@ -1 +1 @@ -00367ee2743a70b37700593477bd778d1ca100000018352b1d0809612857554356411d2c5f73134d0d7c665b29060000003037284675041219682e1274013c1d5b3174733e142333230965550b6b662f3b092b0477412f1406542007293f65792a53f35ba643324efa37000000013811fd2f13e690a36be9a7cf10c492545458b1f3f72470a90000000003bc86ce5eabcdc1010100000100000000000000020000000000ed33e400000000000000187c93924b50cd9f286995a1b993b46ea416fea308f547e5db0100000000000000020000000000ed33e40000000000000018189b7ca9413eccac47df1e3ea6054b6b29614372358f4f820000000024665c10733319777d560437240c5c35291d215a110b72570a443f5b6d0d7019373d11550300000000000000010000000005ff84b44986b07183048fbb000000030100f65baa433940f1370000000301136609867d205b12f35ba643324efa370000000102da44102ba6919963c73e9f164b3a7612007fd2d3e31e19f35ba643324efa3700000001792ad126cdd7662d4e31993c4d9dcd281673113dd34195bf0000011fa21eaebd213753f35ba643324efa3700000001029c3eba7ba98a19002710c095c50b1d4c9e59d2ced0b6b9f35ba643324efa370000000154c6411e5497eca10d37e206ef07832e4a903478222543e900000144cafd56a124e2adf35ba643324efa370000000163048aacea9ab001429eaa2f35f02d01327553dbd2782439f35ba643324efa370000000163aca7ad1cba8e8530ea78976a86880c3318299805f8a83d000100000000000000020000000000ecf2ea0000000000000020465ea31230c1cda14e9c2c76172fea98b556e5797e37dd5a4b5d45a558fb99bbe5f6f9c10100000000000000020000000000ef843a000000000000002408c12525d5cdda9f3d851692012d46468187e2a9ccab12b9974600c0e82908b8d270457301 \ No newline at end of file +00367ee2743a70b37701f35ba643324efa3700000001000000000000000000000000000000005dc6078525de9cc20cfcceac1986d0eb00000018352b1d0809612857554356411d2c5f73134d0d7c665b29060000003037284675041219682e1274013c1d5b3174733e142333230965550b6b662f3b092b0477412f1406542007293f65792a53f35ba643324efa3700000001000000000000000000000000000000003811fd2f13e690a30000000003bc86ce5eabcdc10101000100000000000000020000000000ecf2ea0000000000000020c86e54a66b041704df2dc9886d565c9ab914a9b9d348e65c35bbae64ede1639900000100000000000000020000000000ecb1f000000000000000c50000000200000000000000020000000000ecb1f000000000000000700000000200000000000000020000000000ed33e4000000000000001827484ce24fda33924536afa4be3fc2343eb7d830ccd7b43400000000000000020000000000ef843a00000000000000243a67cc2a0dc26f1111278aea4185294d7bd8c8cc4d00736d4bdd290bc00e86e4c75d9a6a00000000000000020000000000eefa5600000000000000211d36ac66f14d816c4c7a9261428851384547b254c3cccd2a8caec07090b165462c000000245078665c10733319777d560437240c5c35291d215a110b72570a443f5b6d0d7019373d1100000000000000000000000005da962108fa19146a55a3fa000000080100f65baa433940f1370000000302000000000000004a00000000000000550000000000000035000000000000007100f35ba643324efa370000000112007fd2d3e31e19792ad126cdd7662d4e31993c4d9dcd28000001347a1e1c7c4f30acf35ba643324efa37000000011fa21eaebd213753029c3eba7ba98a19002710c095c50b1df35ba643324efa37000000014c9e59d2ced0b6b954c6411e5497eca10d37e206ef07832e0000010af4328160a60c64f35ba643324efa370000000144cafd56a124e2ad63048aacea9ab001429eaa2f35f02d01f35ba643324efa3700000001327553dbd278243963aca7ad1cba8e8530ea78976a86880c000002000000000000003e00000000000000430000000000000053000000000000009700f35ba643324efa370000000108bbf68ba254b06d475982789c0c666f0dd1e24e0829b4fe0000040000000000000019000000000000003800f35ba643324efa37000000016e51fee13924d0c86c4a4287921e776634d6644ff6a9f8250000040000000000000010000000000000001e00f35ba643324efa370000000150e6e0980951
af234c75178bbb164e272d6e9bbf3cf2ab5a0000040000000000000016000000000000001e00f35ba643324efa37000000014454132ef916e1d45c8955e654f143c05c0414f46000234b01000101de0c3874c5a79df35ba643324efa3700000001441db84d376461f51f185fa6eac006750d8db557d2941297f35ba643324efa37000000015cfe5f60dcef96e3705d4acc7a8fbeef39b5f26b193c9fd4010100000000000000020000000000ecf2ea00000000000000209a7b76506981881378d026ae913dd202ab465c4773d2325b3eec0a3d76ded1694077d9eb0000 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn5.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn5.hex index 95ba06d2f130..deac3a9d48be 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn5.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn5.hex @@ -1 +1 @@ -013e7a6e1457d6be26007b15de9c1e51f2ef000000186c3c353472486827534d2915694f1a33594a5401204e135a00000030730d5e3e0d65622d23182d161b6e6c5f6d1f6a027d0f13785b68020e2c682f705a4b195a3e5f1344651b243855063e73f35ba643324efa3700000001183d819796b68f592bf1187bcc2b9c090cededc1a6c1040900000000054f3af00e6a60ce01000100000000000000020000000000eefa560000000000000021f174954baccd0be92808401fcc394a023b4292a258b6eacbcea3e2aa82a5f233e20100000000000000020000000000ecb1f000000000000000c90000000200000000000000020000000000ecb1f000000000000000750000000200000000000000020000000000eefa5600000000000000210368e9df11051d9f268b593435ba3aa9c9d0ab2d0ed79034a9fe627da41bb7d2e400000000000000020000000000ecf2ea00000000000000204e4ff0fd05108e466eca045f09f1f3bcc4d63c1c8c5fe6d2873bbb3afa41578f00000000000000020000000000ecf2ea00000000000000204d8e30709f4a6f24a028157df215def2a258e9901cc6c2156eb2967d554d34a10100000000000000020000000000ecf2ea0000000000000020d2d9b69c7aadf07780e359cc6c49bf0b1ba1aef3a492929a6ae7d8ccedbc0a600100000000000000020000000000ef843a000000000000002447076320d8431c495ba6b6450813ad74c0f7b046fc2bde773b976247072a0226ac3423b40000000024251f2d0642697762215f5e53740b013a1b5f545d4b6412505b6d6f03414e6a361076050e000000000000000100000000057a933d2627334c79268b1f000000010100f65baa433940f13700000003040000000000000005000000000000001600f35ba643324efa37000000013facf6976cee4fa3618dbb7893df4b4d1e793eabdfbe62ff010100000000000000020000000000ed33e4000000000000001872c20fb28c6c3f61718427166e2f98283c71736807b3b084777db2660100000000000000020000000000ecb1f000000000000000c10000000200000000000000020000000000ecb1f0000000000000006c0000000200000000000000020000000000ecf2ea0000000000000020783078668b68bfcd94339b624df1e755b8c6997801d878722d83b0bf75583bc600000000000000020000000000ed33e40000000000000018145f0ae43f86264313f614386056776e2ab0089fe73a53a700000000000000020000000000eefa560000000000000021e7cc644364bda1cecd79dff48cab97b0a6457dd3adc123a978b82c0871081b4b8901 \ No newline at end of file 
+003e7a6e1457d6be2601f35ba643324efa3700000001000000000000000000000000000000003d1359efe5b6ac8d7b15de9c1e51f2ef000000186c3c353472486827534d2915694f1a33594a5401204e135a00000030730d5e3e0d65622d23182d161b6e6c5f6d1f6a027d0f13785b68020e2c682f705a4b195a3e5f1344651b243855063e73f35ba643324efa370000000100000000000000000000000000000000183d819796b68f5900000000054f3af00e6a60ce01000100000000000000020000000000eefa560000000000000021f174954baccd0be92808401fcc394a023b4292a258b6eacbcea3e2aa82a5f233e20100000000000000020000000000ecb1f000000000000000c90000000200000000000000020000000000ecb1f000000000000000750000000200000000000000020000000000eefa5600000000000000210368e9df11051d9f268b593435ba3aa9c9d0ab2d0ed79034a9fe627da41bb7d2e400000000000000020000000000ecf2ea00000000000000204e4ff0fd05108e466eca045f09f1f3bcc4d63c1c8c5fe6d2873bbb3afa41578f00000000000000020000000000ecf2ea00000000000000204d8e30709f4a6f24a028157df215def2a258e9901cc6c2156eb2967d554d34a10100000000000000020000000000ecf2ea0000000000000020d2d9b69c7aadf07780e359cc6c49bf0b1ba1aef3a492929a6ae7d8ccedbc0a600100000000000000020000000000ef843a000000000000002447076320d8431c495ba6b6450813ad74c0f7b046fc2bde773b976247072a0226ac3423b40000000024140b251f2d0642697762215f5e53740b013a1b5f545d4b6412505b6d6f03414e6a361076000000000000000100000000057a933d2627334c79268b1f000000010100f65baa433940f13700000003020000000000000015000000000000002b0000000000000015000000000000003701f35ba643324efa37000000013facf6976cee4fa3618dbb7893df4b4d1e793eabdfbe62ff010100000000000000020000000000ed33e4000000000000001872c20fb28c6c3f61718427166e2f98283c71736807b3b084777db2660100000000000000020000000000ecb1f000000000000000c10000000200000000000000020000000000ecb1f0000000000000006c0000000200000000000000020000000000ecf2ea0000000000000020783078668b68bfcd94339b624df1e755b8c6997801d878722d83b0bf75583bc600000000000000020000000000ed33e40000000000000018145f0ae43f86264313f614386056776e2ab0089fe73a53a700000000000000020000000000eefa560000000000000021e7cc644364bda1cecd79dff48cab97b0a6457dd3adc123a978b82c0871081b4b8901 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn6.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn6.hex index 5703ed81c8d2..e2991c31220c 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn6.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn6.hex @@ -1 +1 @@ 
-01660e8c14b677f21000214ed59836f5beb4000000185e2c094a7058323c23515c3a4e723141622973586e1658170000003047234b25162f6236014d5669780e373731214b6b231c6347267a67226f0b5861385f750a6d781c6161031167402e2012f35ba643324efa3700000001504bf2038ea2adab4b6b9f32961b32e37e3b3aebaaabcd4e00000000045c39882700d6dd00010100000000000000020000000000ecf2ea0000000000000020840d13f5f417ae77d6df1be860ac2545122d008b19f3ceec873135f901facf070100000000000000020000000000ecb1f0000000000000007c0000000200000000000000020000000000ef843a000000000000002465a4f6fc3d6bd27b1eb5b08222760223db6e3d5363a4e876eeb5ce97897762555609ff6200000000000000020000000000ef843a0000000000000024014cec45e4464b61048fb61aa56822140af862063ed4efba24c8c9281005caf53dfed779000000000000240970271523114c694242714d1e11331b27226e554a5e530d5c5e4a6f30692f2e7e5678430000000000000000000000000534ce666c65483a539ea033000000000100000000000000020000000000ecb1f0000000000000006d0000000200000000000000020000000000eefa5600000000000000216dfdbaec0fd9b82312aa2273bd2c70f7eb9ca244f7d53f25f36e5af0f8e6da801800000000000000020000000000ed33e40000000000000018269cf10bd05345a649fbe7c4175ef9f6061fc5b9fbab38ec5df249890100000000000000020000000000ed33e400000000000000187051dd1cb1e9eeed3cc82328af60a3790ef915237331c22a01 \ No newline at end of file +00660e8c14b677f21001f35ba643324efa37000000010000000000000000000000000000000021d534b310bc2d5c7d0be6ad4dcd646c000000185e2c094a7058323c23515c3a4e723141622973586e1658170000003047234b25162f6236014d5669780e373731214b6b231c6347267a67226f0b5861385f750a6d781c6161031167402e2012f35ba643324efa370000000100000000000000000000000000000000504bf2038ea2adab00000000045c39882700d6dd0001000100000000000000020000000000ed33e400000000000000184ad89494b48d61f7038085f02f9f88de2efaf7cbb9a37bd200000000000024545c0970271523114c694242714d1e11331b27226e554a5e530d5c5e4a6f30692f2e7e5600000000000000010000000004a3a61f27a3ddafd7b445e4000000070100f65baa433940f1370000000302000000000000001400000000000000160000000000000031000000000000003901f35ba643324efa37000000010d7a645af78397de2245fd06ecfa5f1f5a6e903506288b4c0100012292d630740defebf35ba643324efa37000000017667798cc580168903e7fd00fc9a98c3306e66da7914acccf35ba643324efa37000000012a4d1b4cbdb940ef0cc8bf2dad17bde765a4f6fc3d6bd27b0000040000000000000010000000000000001600f35ba643324efa3700000001715e67f5030ff91e014cec45e4464b61048fb61aa56822140000017ae502881464e412f35ba643324efa37000000014683e013bcebff1e6254cb832a669d13076cd5820afec8f6f35ba643324efa37000000012d9e3fb4c42927c07cd38ab5397794ba27301fdd7977dd030100040000000000000011000000000000001c01000000000003ff72f35ba643324efa37000000017497b21a66e098a7269cf10bd05345a649fbe7c4175ef9f6f35ba643324efa3700000001061fc5b9fbab38ec359bf50accd84ae821fccf2a28012f7c000002000000000000001300000000000000250000000000000018000000000000007b01f35ba643324efa37000000017dfcfc8b1d2e1ab864b2467fa3234f2a3822f684890a0acd0100040000000000000001000000000000000200f35ba643324efa3700000001049f75c411e7c91f1e60f6305db51726647f2bb3263712d5000100000000000000020000000000ed33e400000000000000186f8567cf2e5268aa16920c76130f15f01c6956a44637af6c8bb52caf0000 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn7.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn7.hex index c937c2953de5..dae87724ca66 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn7.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn7.hex @@ -1 +1 @@ 
-0024739b174a8b7fd5000208cf283d343c7900000018446a11253b6f551f3b492e02670d02265e50560c15160a7500000030260f6e221e5a7d0831465e5a3743466708393364403e0918050e30245921682a0b1f0f671f1b241249734c4d10215252f35ba643324efa37000000015e7306d8bccbdf92781de46942993ea6563d3beba720d2a70000000000d3c04b13bf4eb30101000100000000000000020000000000ecf2ea000000000000002035dbc1453d385b9bee0f63b8a8e4e5a3bfb1111ff6e496a91afa4d85f5bc9a910100000000000000020000000000ecf2ea0000000000000020081d530bf965a64bd73dd4b914071fc26c87db66e237e186e7d6fadb07da01ae0100000000000000020000000000ecb1f000000000000001c30000000200000000000000020000000000eefa560000000000000021732d321ed1b7de663e1ced660109ff89a01e58e73b76c1687e7b0f96721e655d5b00000000000000020000000000ecb1f0000000000000016e0000000200000000000000020000000000eefa56000000000000002166889dbb6db2c24c0d09f3af846a17a117cfaaea5839c0a782381b7770e2c770c000000000000000020000000000ecb1f000000000000001190000000200000000000000020000000000ef843a0000000000000024270cd4f5c33659ac21297a9ba9e6cb1e49f8ece46f4e1729c69c96f9dd3df69021d42c5b00000000000000020000000000ecb1f000000000000000c10000000200000000000000020000000000ecf2ea000000000000002077acfaa917960e952478413bc7c50a38c9cb476f123702a245b9d5973cafcc3e00000000000000020000000000ecb1f0000000000000006d0000000200000000000000020000000000eefa5600000000000000212e6708fa2184461644d7ab729e56b0c3f863a11ed06432f8a64dee10c2d0f9cabb00000000000000020000000000ed33e4000000000000001830d6a5f9247c322828dec9ce68de57b04d84f4725830db41000000002479647a482148682650332370617a63271a1e180c144f46506850294f025500564b182904000000000000000000000000047269ef300cd26c0796930b000000040100f65baa433940f13700000003013d8609f17e9988eef35ba643324efa37000000010946853f3814a8c17d251065d4a58c0620b9ea9168b7c89cf35ba643324efa370000000159bd7675a24201656816c2baa77bb30c258c207dc684f1530100010159873b9ac9606df35ba643324efa3700000001398b461be12a81cf671679c8f13be2e609b054c86d86fc99f35ba643324efa37000000015e17c3a444d070447679f4af6768ad5367bf5a8fb7bb8fe8000001348a0954c20c580df35ba643324efa37000000011a981222dccc4a2071f15756559fcec55f362e10a6a5dd45f35ba643324efa37000000015495db943bb26c782b2f82397d94742224fa71b34cf50ce9000002000000000000004f00000000000000580000000000000024000000000000002d00f35ba643324efa37000000011add33353d4c5d816deebc0ac46e00d4723353bf3dce27f600001604f4b20000 \ No newline at end of file 
+0024739b174a8b7fd501f35ba643324efa3700000001000000000000000000000000000000002b16eebc90fa338c527ba65eaaa6b65500000018446a11253b6f551f3b492e02670d02265e50560c15160a7500000030260f6e221e5a7d0831465e5a3743466708393364403e0918050e30245921682a0b1f0f671f1b241249734c4d10215252f35ba643324efa3700000001000000000000000000000000000000005e7306d8bccbdf920000000000d3c04b13bf4eb301010100000000000000020000000000eefa56000000000000002161afbcd19c93bd5183b661b0e4e8099b2dcefde55d7dddd00bb13984a459d19a6b0100000000000000020000000000eefa560000000000000021c89bda27718c6a2ebd4518545aa26b380609b351efaf0d46de5896b3c305ff79c800000000000024126579647a482148682650332370617a63271a1e180c144f46506850294f025500564b180000000100000001000000000673481e7c1932680f50b1fc000000090100f65baa433940f1370000000301167cca5ae2f4b043f35ba643324efa3700000001342dcce399a826943d8609f17e9988ee0946853f3814a8c1f35ba643324efa37000000017d251065d4a58c0620b9ea9168b7c89c59bd7675a2420165000004000000000000001400000000000000280100000000000afa4ef35ba643324efa3700000001671679c8f13be2e609b054c86d86fc995e17c3a444d07044f35ba643324efa37000000017679f4af6768ad5367bf5a8fb7bb8fe83a54ae34f6643c410000040000000000000028000000000000003400f35ba643324efa37000000015495db943bb26c782b2f82397d94742224fa71b34cf50ce9000002000000000000004f00000000000000580000000000000024000000000000002d00f35ba643324efa37000000011add33353d4c5d816deebc0ac46e00d4723353bf3dce27f600000400000000000000360000000000000038010000000000075f51f35ba643324efa3700000001069f80b28db5e98132de625b2d4a62cb0482d6be6b194987f35ba643324efa37000000016f9aec8a552941bd4dad9c1ea2e0ed9a51f2f2545c3187f7000002000000000000005a000000000000005c000000000000005b000000000000006800f35ba643324efa37000000015700ed03edfd6b7349692aa8c3a56cc65fa6453e50769df8010001336f5be88f1916b9f35ba643324efa370000000144ff8480b3768e1f3460bb1df3ac0f502eb28f394b07bdbff35ba643324efa37000000010b46ca8fd4ea152d460dd0a62918cfc45b351756cc712c4a00000153e01cac7555678bf35ba643324efa37000000013863f1383b8d9c410c653472155412e07bde413d9047b00df35ba643324efa37000000014e0009b18c1cd768270cd4f5c33659ac21297a9ba9e6cb1e000002000000000000000a000000000000002c000000000000004100000000000000a100f35ba643324efa37000000011c0562e39da0bc1251011b8937a3e5e41f66579e4beadca2000015c7f2ec0100000000000000020000000000ed33e4000000000000001833977a24f45a18c54955e1c8cbdab950425377cc074dcf4c00 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn8.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn8.hex index 4bf75b39c6e6..2009a56376fe 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn8.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn8.hex @@ -1 +1 @@ 
-0061cb5b837d20ad10000416d69eca8dfb8d00000018633b58205e457c3c1f3c4b28354f2c4d537d5b460e586d1700000030303921356e653b7847040c697c397e616850210f651236241619542e70577163283c2d1669415e4a02380f486b0d1a45f35ba643324efa3700000001063a648227a08438594eefd8cb59f2f077dbba194b02bff80000000002e4aefb2758cc0101010100000000000000020000000000ef843a00000000000000245ff571c3d36849236e752c2aec6695ecfb1ce2f3a0e64ef2e22b363301005b847a9d743800000100000000000000020000000000ecf2ea000000000000002086a49ad79c95eecaf1dea02b78106b918369b0a9644ec40a1c08f7b937ae29fa0100000000000000020000000000eefa5600000000000000210f353b17ab9db648a282f5571b0197c452c7ffe102245b9e66485a7eb96fa6f65100000024712e197c3e696e0c091a441a63481f4443216d13241b09673a6d231d4a2a066e73252817000000010000000000000000057951d364dd1160eed425c1000000020100f65baa433940f1370000000304000000000000001c0000000000000036010000000000071714f35ba643324efa370000000174b12fc6d7626b1162c828b7dcc08db62f42281032f211d2f35ba643324efa3700000001437e31bf9e8298d61633d078a7a76264234288abc7c6beb50100040000000000000011000000000000001301000000000005de6bf35ba643324efa3700000001484f6caaaa458dfb6eea3448c10706220e02fd30b4efcb4bf35ba643324efa37000000015f381e2253bca01531facb90d441ca4046f74540de1c1ae6000004260d980100000000000000020000000000eefa56000000000000002177345b25d177e44e2a4271cf2add922969aada99767e24e56121a4cafdb92ca08601 \ No newline at end of file +0161cb5b837d20ad1001f35ba643324efa3700000001000000000000000000000000000000005c75be4255d9020f59a3303c4602c9df00000018633b58205e457c3c1f3c4b28354f2c4d537d5b460e586d1700000030303921356e653b7847040c697c397e616850210f651236241619542e70577163283c2d1669415e4a02380f486b0d1a45f35ba643324efa370000000100000000000000000000000000000000063a648227a084380000000002e4aefb2758cc01010100000100000000000000020000000000ed33e400000000000000184e44f69ea9acffab0cba80c89439e29e7c193a3f28b3837f0000000000243c42712e197c3e696e0c091a441a63481f4443216d13241b09673a6d231d4a2a066e732500000001000000010000000002e7784d3790699f7096ea9c000000090100f65baa433940f1370000000302000000000000000f00000000000000400000000000000035000000000000006c00f35ba643324efa37000000011756b39783c7ba9574b12fc6d7626b1162c828b7dcc08db60100011633d078a7a76264f35ba643324efa3700000001234288abc7c6beb55273ef42286ab634058a3119a2b0c438f35ba643324efa37000000011f085e8c931ad26331049a8d4a7eb7fa6478854aa85e5c8f000002000000000000001800000000000000440000000000000045000000000000008400f35ba643324efa37000000016d14a71659dc7cd70416d69eca8dfb8d7c2128a2a462ef92000004000000000000000100000000000000060100000000000d0efcf35ba643324efa37000000015ff571c3d36849236e752c2aec6695ec2cf8792fcb727da8f35ba643324efa37000000017927735079f10e7d422d8000999b15f1257579a91c3a4ebd0100017782b62bfa3e0bf9f35ba643324efa370000000144b31142e3645305245b4ed58b9d9a87624b808dabfac151f35ba643324efa37000000014f2d920170ffe3a97b5337dcbf2d24334b6e35476a67a0a8000004000000000000000c00000000000000420100000000000b87bdf35ba643324efa37000000014af129fbbe99f452756e2a6d34ac061b65797d2d85780adef35ba643324efa370000000127723be892ad9a3b14c96e9567b8a11572923f3b4ced553401000158d2f7abab5315ecf35ba643324efa37000000017f8bb54c6b5cb5787fdfbd4882b864de75c7914dc1279903f35ba643324efa3700000001108c28a2d4606025019b78b40eca652e59640779c2d6b12b000001362a3baa4ada1bcaf35ba643324efa37000000011afc85e732a9acfd2437de6c0625c8c016f2b96bb74f8aaef35ba643324efa370000000164aae4d4b63c46dd600f3a1fca32fd92755bb09bc6504f7e0000040000000000000006000000000000005a00f35ba643324efa3700000001237268682493eedb3b69ec16111fe3142f8c9b796728b922010100000000000000020000000000ef843a0000000000000024466da60d68279285749826b9d5f87eb2
d74c59bb26019f992c12ce2bd68ccea41d33379ce95625800000 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn9.hex b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn9.hex index 0c048131da66..4c8295300606 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn9.hex +++ b/hedera-node/hedera-mono-service/src/test/resources/serdes/MerkleToken-v5-sn9.hex @@ -1 +1 @@ -0113a411275abbf396007c604bbc258a7a03000000185d416f6d683039575f7a160f73104c786e3d7b027e5e3e75000000307e2c72343a27185c4b077d016d5540040b5d0e2a0a38423d57167405247d1844166d762926541a23593032654c652612f35ba643324efa3700000001601b5c210bf4964d50458bd373da0c5204bed7dce3552ee400000000052e885d76cf9c2401010100000000000000020000000000ecb1f000000000000000780000000200000000000000020000000000ecf2ea000000000000002060da6efd092f23d627ce46fad996a300495ea639f53c076da9ecb8cf00dcc79700000000000000020000000000ef843a00000000000000241544971201c85b39799ee7e0789aeaf008989016678ae88017f9984f41a59ccab7ca99ef00000000000000247a286d1e186a0f581b7e64386c7d255c14115d353556046e137d5b6c1b336a57033f3656000000000000000100000000090241be34b3f2320a57fde0000000020100f65baa433940f137000000030108b7c14f114dcf51f35ba643324efa37000000015e5c6f5bb99e6e464c8db23828e247831eb5f5ef1b0d63f7f35ba643324efa37000000016301c09c5d1ec10815e88138b63c4c3a12cfa790e4c3d2e30100020000000000000003000000000000000b000000000000004e000000000000009b01f35ba643324efa37000000014d40f08fc18a658d3c7cf1f1f94420cb1f99202e4c86ddf7000100000000000000020000000000ecb1f000000000000000bc0000000200000000000000020000000000ef843a000000000000002450466ef695ecfaae692a5c264d4a9239678501770824d8685b2c18b437c7a8e645b8111f00000000000000020000000000ecb1f000000000000000640000000200000000000000020000000000ed33e400000000000000181402b1a6565c50b212e24ba4fb168e2117b98b86f88488e000000000000000020000000000ed33e400000000000000182b502c1a38da24d17c06996def4d0980270629792b4190957b091cd40000 \ No newline at end of file 
+0113a411275abbf39601f35ba643324efa3700000001000000000000000000000000000000002e9de256ae3abf1472e3541c80172386000000185d416f6d683039575f7a160f73104c786e3d7b027e5e3e75000000307e2c72343a27185c4b077d016d5540040b5d0e2a0a38423d57167405247d1844166d762926541a23593032654c652612f35ba643324efa370000000100000000000000000000000000000000601b5c210bf4964d00000000052e885d76cf9c2401010100000000000000020000000000ef843a000000000000002421159b53d2f8fdc214e00da0d1289a221ba906b2ea5aff98741d813d6097edd0cdaf45fa000100000000000000020000000000ecf2ea00000000000000200f8a8f6d942b1966f391a47938b55386d60b19c514d38a87632200a876dbfc8f0100000000000000020000000000ef843a000000000000002413ae08f5bc73e52764a9b532931400f2f4060bf758124203b6586f2d9e33433f3c55ebcd000000002431047a286d1e186a0f581b7e64386c7d255c14115d353556046e137d5b6c1b336a57033f0000000100000001000000000a6c46a30abf8c76062f28ff000000090100f65baa433940f1370000000304000000000000002b000000000000004a010000000000024f24f35ba643324efa37000000014c8db23828e247831eb5f5ef1b0d63f76301c09c5d1ec108f35ba643324efa370000000115e88138b63c4c3a12cfa790e4c3d2e30e861af48f10f94f0100040000000000000009000000000000002200f35ba643324efa370000000118a3d1a670bf79424d40f08fc18a658d3c7cf1f1f94420cb010004000000000000000c000000000000003a01000000000005f054f35ba643324efa3700000001478a9765083615616e6763f13a862130585982e5d1254cd2f35ba643324efa370000000118ed55661f614c3322f85f11e85a34466b119784feb76d300100020000000000000022000000000000004c000000000000000a000000000000002700f35ba643324efa3700000001407445338b484c04654e52a0a7cc7c8b2c157b03f7cce55b0100011a76d6b6e9c831d0f35ba643324efa37000000015332b6d37e466964525908607c4697d200086d1de5b82af4f35ba643324efa37000000011f7e5d1d688e47dd50466ef695ecfaae692a5c264d4a92390100040000000000000001000000000000001300f35ba643324efa37000000017741181c1d22f7d31402b1a6565c50b212e24ba4fb168e210000012b502c1a38da24d1f35ba643324efa37000000017c06996def4d0980270629792b41909523a3a728e0ca0bd9f35ba643324efa37000000016e0c064d0cfd168732680441b57a39251951191a8786cfd30000017b9ebe56a66f7569f35ba643324efa37000000016b3d04e10450dcb6151b0bb7805c9e165e1894997e473258f35ba643324efa37000000017867b9c4b29ddff3411be84e098a6ba87aca45e5b421f6d7000004000000000000000b000000000000001b00f35ba643324efa3700000001779d49fb2454f9ee2dd587bc7545f18f790b038a827567430100263639fc0100000000000000020000000000eefa5600000000000000219bc9c4ca545b8bf5667eeaa655002e53ae590edf764eeef77bda39d89e6ce6516501 \ No newline at end of file diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/CryptoTransferScenarios.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/CryptoTransferScenarios.java index 25da5614ae45..569edfc1939f 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/CryptoTransferScenarios.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/CryptoTransferScenarios.java @@ -320,6 +320,8 @@ public PlatformTxnAccessor platformTxn() throws Exception { return PlatformTxnAccessor.from(newSignedCryptoTransfer() .changingOwner(ROYALTY_TOKEN_NFT, FIRST_TOKEN_SENDER, NO_RECEIVER_SIG) .adjustingHbars(FIRST_TOKEN_SENDER, +1_000) + // SECOND_TOKEN_SENDER here is used only because it has a key, not because it's sending any tokens + .adjustingHbars(SECOND_TOKEN_SENDER, -1_000) .get()); } }, @@ -329,6 +331,8 @@ public PlatformTxnAccessor platformTxn() throws Exception { return PlatformTxnAccessor.from(newSignedCryptoTransfer() 
.changingOwner(ROYALTY_TOKEN_NFT, FIRST_TOKEN_SENDER, NO_RECEIVER_SIG)
.adjusting(FIRST_TOKEN_SENDER, KNOWN_TOKEN_IMMUTABLE, +1_000)
+ // SECOND_TOKEN_SENDER here is used only because it has a key, not because it's sending any tokens
+ .adjusting(SECOND_TOKEN_SENDER, KNOWN_TOKEN_IMMUTABLE, -1_000)
.get());
}
},
@@ -338,6 +342,8 @@ public PlatformTxnAccessor platformTxn() throws Exception {
return PlatformTxnAccessor.from(newSignedCryptoTransfer()
.changingOwner(MISSING_TOKEN_NFT, FIRST_TOKEN_SENDER, NO_RECEIVER_SIG)
.adjusting(FIRST_TOKEN_SENDER, KNOWN_TOKEN_IMMUTABLE, +1_000)
+ // SECOND_TOKEN_SENDER here is used only because it has a key, not because it's sending any tokens
+ .adjusting(SECOND_TOKEN_SENDER, KNOWN_TOKEN_IMMUTABLE, -1_000)
.get());
}
},
diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/utils/TestFixturesKeyLookup.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/utils/TestFixturesKeyLookup.java
index 55fddeb2d778..a28d58b1f4d2 100644
--- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/utils/TestFixturesKeyLookup.java
+++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/utils/TestFixturesKeyLookup.java
@@ -18,7 +18,6 @@ import com.hedera.hapi.node.base.AccountID;
import com.hedera.hapi.node.state.token.Account;
-import com.hedera.node.app.service.mono.state.virtual.EntityNumVirtualKey;
import com.hedera.node.app.service.token.ReadableAccountStore;
import com.hedera.node.app.spi.state.ReadableKVState;
import com.hedera.node.app.spi.state.ReadableStates;
@@ -27,8 +26,8 @@ import edu.umd.cs.findbugs.annotations.Nullable;
public class TestFixturesKeyLookup implements ReadableAccountStore {
- private final ReadableKVState aliases;
- private final ReadableKVState accounts;
+ private final ReadableKVState aliases;
+ private final ReadableKVState accounts;
public TestFixturesKeyLookup(@NonNull final ReadableStates states) {
this.accounts = states.get("ACCOUNTS");
@@ -40,18 +39,19 @@ public TestFixturesKeyLookup(@NonNull final ReadableStates states) {
public Account getAccountById(@NonNull AccountID accountID) {
final var alias = accountID.alias();
if (alias != null && alias.length() > 0) {
- final var num = aliases.get(alias.asUtf8String());
+ final var num = aliases.get(alias);
if (num == null) {
return null;
} else {
- final var account = accounts.get(new EntityNumVirtualKey(num));
- return account == null ? null : getNewAccount(num, alias, account);
+ final var account = accounts.get(num);
+ return account == null ? null : getNewAccount(num.accountNum(), alias, account);
}
} else if (!accountID.hasAccountNum()) {
return null;
} else {
final long num = accountID.accountNumOrThrow();
- final var account = accounts.get(new EntityNumVirtualKey(num));
+ final var account =
+ accounts.get(AccountID.newBuilder().accountNum(num).build());
return account == null ? null : getNewAccount(num, Bytes.EMPTY, account);
}
}
@@ -62,11 +62,7 @@ private Account getNewAccount(long num, Bytes alias, Account account) {
@Override
@Nullable
- public AccountID getAccountIDByAlias(@NonNull final String alias) {
- final var entityNum = aliases.get(alias);
- if (entityNum == null) {
- return null;
- }
- return AccountID.newBuilder().accountNum(entityNum).build();
+ public AccountID getAccountIDByAlias(@NonNull final Bytes alias) {
+ return aliases.get(alias);
}
}
diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java
index afb84aa2af1c..4daecda2f221 100644
--- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java
+++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java
@@ -35,9 +35,7 @@ public final class FreezeServiceImpl implements FreezeService {
public static final String UPGRADE_FILE_HASH_KEY = "UPGRADE_FILE_HASH";
public static final String DUAL_STATE_KEY = "DUAL_STATE";
-
- private static final SemanticVersion CURRENT_VERSION =
- SemanticVersion.newBuilder().minor(34).build();
+ private static final SemanticVersion GENESIS_VERSION = SemanticVersion.DEFAULT;
@Override
public void registerSchemas(@NonNull SchemaRegistry registry) {
@@ -45,7 +43,7 @@ public void registerSchemas(@NonNull SchemaRegistry registry) {
}
private Schema networkAdminSchema() {
- return new Schema(CURRENT_VERSION) {
+ return new Schema(GENESIS_VERSION) {
@NonNull
@Override
@SuppressWarnings("rawtypes")
diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkServiceImpl.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkServiceImpl.java
index 8b27a07bbb26..7667093ad627 100644
--- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkServiceImpl.java
+++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkServiceImpl.java
@@ -46,8 +46,7 @@ public final class NetworkServiceImpl implements NetworkService {
public static final String CONTEXT_KEY = "CONTEXT";
public static final String STAKING_KEY = "STAKING";
public static final String RUNNING_HASHES_KEY = "RUNNING_HASHES";
- private static final SemanticVersion CURRENT_VERSION =
- SemanticVersion.newBuilder().minor(34).build();
+ private static final SemanticVersion GENESIS_VERSION = SemanticVersion.DEFAULT;
private static final ImmutableHash GENESIS_HASH = new ImmutableHash(new byte[DigestType.SHA_384.digestLength()]);
@Override
@@ -56,7 +55,7 @@ public void registerSchemas(final @NonNull SchemaRegistry registry) {
}
private Schema networkSchema() {
- return new Schema(CURRENT_VERSION) {
+ return new Schema(GENESIS_VERSION) {
@NonNull
@Override
public Set statesToCreate() {
diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java
index fa9ef73e6f71..6de6d1274590 100644
--- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java
+++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java
@@ -42,7 +42,6 @@ import com.hedera.hapi.node.token.GrantedTokenAllowance;
import com.hedera.hapi.node.transaction.Query;
import com.hedera.hapi.node.transaction.Response;
-import com.hedera.node.app.service.evm.contracts.execution.StaticProperties;
import com.hedera.node.app.service.networkadmin.impl.utils.NetworkAdminServiceUtil;
import com.hedera.node.app.service.token.ReadableAccountStore;
import com.hedera.node.app.service.token.ReadableTokenRelationStore;
@@ -194,19 +193,13 @@ private static List getTokenRelationships(
ReadableTokenStore readableTokenStore,
ReadableTokenRelationStore tokenRelationStore) {
final var tokenRelationshipList = new ArrayList();
- var tokenNum = account.headTokenNumber();
+ var tokenId = TokenID.newBuilder().tokenNum(account.headTokenNumber()).build();
int count = 0;
- while (tokenNum != 0 && count <= maxRelsPerInfoQuery) {
+ while (tokenId != null && !tokenId.equals(TokenID.DEFAULT) && count <= maxRelsPerInfoQuery) {
final var tokenRelation = tokenRelationStore.get(
- AccountID.newBuilder().accountNum(account.accountNumber()).build(),
- TokenID.newBuilder().tokenNum(tokenNum).build());
+ AccountID.newBuilder().accountNum(account.accountNumber()).build(), tokenId);
if (tokenRelation != null) {
- final var tokenId = TokenID.newBuilder()
- .shardNum(StaticProperties.getShard())
- .realmNum(StaticProperties.getRealm())
- .tokenNum(tokenNum)
- .build();
final TokenMetadata token = readableTokenStore.getTokenMeta(tokenId);
if (token != null) {
final TokenRelationship tokenRelationship = TokenRelationship.newBuilder()
@@ -224,7 +217,7 @@ private static List getTokenRelationships(
.build();
tokenRelationshipList.add(tokenRelationship);
}
- tokenNum = tokenRelation.nextToken();
+ tokenId = tokenRelation.nextToken();
} else {
break;
}
diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java
index 595e8e06f620..a68f9e7a76ca 100644
--- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java
+++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java
@@ -74,7 +74,7 @@ public void validate(@NonNull final QueryContext context) throws PreCheckExcepti
if (!op.hasTransactionID()) throw new PreCheckException(INVALID_TRANSACTION_ID);
// The receipt must exist for that transaction ID
- final var recordCache = context.createStore(RecordCache.class);
+ final var recordCache = context.recordCache();
final var receipt = recordCache.getReceipt(op.transactionIDOrThrow());
mustExist(receipt, INVALID_TRANSACTION_ID);
}
diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java
index b881abf8684e..a4909227e470 100644
--- a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java
+++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java
@@ -16,6 +16,8 @@ package com.hedera.node.app.service.networkadmin.impl.test.handlers;
+import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount;
+import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.lenient;
@@ -26,6 +28,7 @@ import com.hedera.hapi.node.base.TokenSupplyType;
import com.hedera.hapi.node.base.TokenType;
import com.hedera.hapi.node.base.TransactionID;
+import com.hedera.hapi.node.state.common.EntityIDPair;
import com.hedera.hapi.node.state.token.Account;
import com.hedera.hapi.node.state.token.AccountApprovalForAllAllowance;
import com.hedera.hapi.node.state.token.AccountCryptoAllowance;
@@ -36,8 +39,6 @@ import com.hedera.hapi.node.transaction.TransactionRecord;
import com.hedera.node.app.fixtures.state.FakeHederaState;
import com.hedera.node.app.fixtures.state.FakeSchemaRegistry;
-import com.hedera.node.app.service.mono.utils.EntityNum;
-import com.hedera.node.app.service.mono.utils.EntityNumPair;
import com.hedera.node.app.service.token.ReadableAccountStore;
import com.hedera.node.app.service.token.ReadableTokenRelationStore;
import com.hedera.node.app.service.token.ReadableTokenStore;
@@ -83,15 +84,17 @@ public class NetworkAdminHandlerTestBase {
AccountID.newBuilder().accountNum(32134).build();
protected static final long payerBalance = 10_000L;
- protected final EntityNum fungibleTokenNum = EntityNum.fromLong(1L);
- protected final EntityNum nonFungibleTokenNum = EntityNum.fromLong(2L);
- protected final EntityNumPair fungiblePair =
- EntityNumPair.fromLongs(accountNum.longValue(), fungibleTokenNum.longValue());
- protected final EntityNumPair nonFungiblePair =
- EntityNumPair.fromLongs(accountNum.longValue(), nonFungibleTokenNum.longValue());
-
- protected final TokenID tokenId =
- TokenID.newBuilder().tokenNum(fungibleTokenNum.longValue()).build();
+ protected final TokenID fungibleTokenId = asToken(1L);
+ protected final TokenID nonFungibleTokenId = asToken(2L);
+ protected final EntityIDPair fungiblePair = EntityIDPair.newBuilder()
+ .accountId(accountId)
+ .tokenId(fungibleTokenId)
+ .build();
+ protected final EntityIDPair nonFungiblePair = EntityIDPair.newBuilder()
+ .accountId(accountId)
+ .tokenId(nonFungibleTokenId)
+ .build();
+
protected final String tokenName = "test token";
protected final String tokenSymbol = "TT";
protected final AccountID treasury = AccountID.newBuilder().accountNum(100).build();
@@ -100,8 +103,8 @@ public class NetworkAdminHandlerTestBase {
protected final String memo = "test memo";
protected MapReadableKVState readableAccounts;
- protected MapReadableKVState readableTokenState;
- protected MapReadableKVState readableTokenRelState;
+ protected MapReadableKVState readableTokenState;
+ protected MapReadableKVState readableTokenRelState;
protected ReadableTokenStore readableTokenStore;
@@ -191,7 +194,7 @@ private void givenAccountsInReadableStore() {
private void givenTokensInReadableStore()
{ readableTokenState = readableTokenState(); - given(readableStates.get(TOKENS)).willReturn(readableTokenState); + given(readableStates.get(TOKENS)).willReturn(readableTokenState); readableTokenStore = new ReadableTokenStoreImpl(readableStates); } @@ -200,7 +203,7 @@ private void givenReadableTokenRelsStore() { .value(fungiblePair, fungibleTokenRelation) .value(nonFungiblePair, nonFungibleTokenRelation) .build(); - given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); } @@ -249,7 +252,7 @@ protected MapReadableKVState.Builder emptyReadableAccountSta } @NonNull - protected MapReadableKVState.Builder emptyReadableTokenRelsStateBuilder() { + protected MapReadableKVState.Builder emptyReadableTokenRelsStateBuilder() { return MapReadableKVState.builder(TOKEN_RELS); } @@ -260,33 +263,33 @@ protected RecordCacheImpl emptyRecordCacheBuilder() { } @NonNull - protected MapReadableKVState readableTokenState() { - return MapReadableKVState.builder(TOKENS) - .value(fungibleTokenNum, fungibleToken) - .value(nonFungibleTokenNum, nonFungibleToken) + protected MapReadableKVState readableTokenState() { + return MapReadableKVState.builder(TOKENS) + .value(fungibleTokenId, fungibleToken) + .value(nonFungibleTokenId, nonFungibleToken) .build(); } protected void givenValidFungibleToken() { - givenValidFungibleToken(autoRenewId.accountNum()); + givenValidFungibleToken(autoRenewId); } - protected void givenValidFungibleToken(long autoRenewAccountNumber) { - givenValidFungibleToken(autoRenewAccountNumber, false, false, false, false, true, true); + protected void givenValidFungibleToken(AccountID autoRenewAccountId) { + givenValidFungibleToken(autoRenewAccountId, false, false, false, false, true, true); } protected void givenValidNonFungibleToken() { givenValidFungibleToken(); nonFungibleToken = fungibleToken .copyBuilder() - .tokenNumber(nonFungibleTokenNum.longValue()) + .tokenId(nonFungibleTokenId) .customFees(List.of()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) .build(); } protected void givenValidFungibleToken( - long autoRenewAccountNumber, + AccountID autoRenewAccountId, boolean deleted, boolean paused, boolean accountsFrozenByDefault, @@ -294,12 +297,12 @@ protected void givenValidFungibleToken( boolean withAdminKey, boolean withSubmitKey) { fungibleToken = new Token( - tokenId.tokenNum(), + fungibleTokenId, tokenName, tokenSymbol, 1000, 1000, - treasury.accountNum(), + treasury, null, null, null, @@ -311,7 +314,7 @@ protected void givenValidFungibleToken( deleted, TokenType.FUNGIBLE_COMMON, TokenSupplyType.INFINITE, - autoRenewAccountNumber, + autoRenewAccountId, autoRenewSecs, expirationTime, memo, @@ -364,29 +367,29 @@ protected void givenValidAccount( protected void givenFungibleTokenRelation() { fungibleTokenRelation = TokenRelation.newBuilder() - .tokenNumber(tokenId.tokenNum()) - .accountNumber(accountNum) + .tokenId(fungibleTokenId) + .accountId(accountId) .balance(1000L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(0L) - .previousToken(3L) + .nextToken(asToken(0L)) + .previousToken(asToken(3L)) .build(); } protected void givenNonFungibleTokenRelation() { nonFungibleTokenRelation = TokenRelation.newBuilder() - .tokenNumber(nonFungibleTokenNum.longValue()) - .accountNumber(accountNum) + .tokenId(nonFungibleTokenId) + .accountId(asAccount(accountNum)) .balance(1000L) .frozen(false) 
.kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(0L) - .previousToken(3L) + .nextToken(asToken(0L)) + .previousToken(asToken(3L)) .build(); } diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkGetAccountDetailsHandlerTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkGetAccountDetailsHandlerTest.java index 2eb01445948d..2db228726172 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkGetAccountDetailsHandlerTest.java +++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkGetAccountDetailsHandlerTest.java @@ -49,7 +49,6 @@ import com.hedera.hapi.node.token.GrantedTokenAllowance; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; -import com.hedera.node.app.service.evm.contracts.execution.StaticProperties; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetAccountDetailsHandler; import com.hedera.node.app.service.networkadmin.impl.utils.NetworkAdminServiceUtil; import com.hedera.node.app.service.token.ReadableAccountStore; @@ -234,11 +233,7 @@ void getsResponseWithTokenRelations() { .build(); List tokenRelationships = new ArrayList<>(); var tokenRelation = TokenRelationship.newBuilder() - .tokenId(TokenID.newBuilder() - .shardNum(StaticProperties.getShard()) - .realmNum(StaticProperties.getRealm()) - .tokenNum(nonFungibleTokenNum.longValue()) - .build()) + .tokenId(nonFungibleTokenId) .balance(1000L) .decimals(1000) .symbol(tokenSymbol) diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java index 14395c032a5d..005ef9b8623e 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java +++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java @@ -85,7 +85,7 @@ void validatesQueryWhenValidReceipt() throws Throwable { final var query = createGetTransactionRecieptQuery(transactionID, false, false); given(context.query()).willReturn(query); - given(context.createStore(RecordCache.class)).willReturn(cache); + given(context.recordCache()).willReturn(cache); assertThatCode(() -> networkTransactionGetReceiptHandler.validate(context)) .doesNotThrowAnyException(); diff --git a/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java b/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java index c3d91a7a4346..089f79bb59fb 100644 --- a/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java +++ 
b/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java @@ -71,6 +71,7 @@ import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.mock; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.ScheduleID; import com.hedera.hapi.node.state.token.Account; @@ -79,7 +80,6 @@ import com.hedera.hapi.node.state.token.AccountFungibleTokenAllowance; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.mono.pbj.PbjConverter; -import com.hedera.node.app.service.mono.state.virtual.EntityNumVirtualKey; import com.hedera.node.app.service.mono.state.virtual.schedule.ScheduleVirtualValue; import com.hedera.node.app.service.schedule.ReadableScheduleStore; import com.hedera.node.app.service.schedule.impl.ReadableScheduleStoreImpl; @@ -202,7 +202,7 @@ public static ReadableScheduleStore mockSchedule(Long schedId, KeyTree key, Tran return new ReadableScheduleStoreImpl(new MapReadableStates(Map.of("SCHEDULES_BY_ID", schedulesById))); } - private static ReadableKVState wellKnownAccountsState() { + private static ReadableKVState wellKnownAccountsState() { return new MapReadableKVState<>(ACCOUNTS_KEY, wellKnownAccountStoreAt()); } @@ -225,50 +225,48 @@ public class SigReqAdapterUtils { .spenderNum(DEFAULT_PAYER.getAccountNum()) .build(); - public static Map wellKnownAccountStoreAt() { - final var destination = new HashMap(); + public static Map wellKnownAccountStoreAt() { + final var destination = new HashMap(); destination.put( - EntityNumVirtualKey.fromLong(FIRST_TOKEN_SENDER.getAccountNum()), + toPbj(FIRST_TOKEN_SENDER), toPbjAccount(FIRST_TOKEN_SENDER.getAccountNum(), FIRST_TOKEN_SENDER_KT.asPbjKey(), 10_000L)); destination.put( - EntityNumVirtualKey.fromLong(SECOND_TOKEN_SENDER.getAccountNum()), + toPbj(SECOND_TOKEN_SENDER), toPbjAccount(SECOND_TOKEN_SENDER.getAccountNum(), SECOND_TOKEN_SENDER_KT.asPbjKey(), 10_000L)); destination.put( - EntityNumVirtualKey.fromLong(TOKEN_RECEIVER.getAccountNum()), - toPbjAccount(TOKEN_RECEIVER.getAccountNum(), TOKEN_WIPE_KT.asPbjKey(), 0L)); + toPbj(TOKEN_RECEIVER), toPbjAccount(TOKEN_RECEIVER.getAccountNum(), TOKEN_WIPE_KT.asPbjKey(), 0L)); destination.put( - EntityNumVirtualKey.fromLong(DEFAULT_NODE.getAccountNum()), - toPbjAccount(DEFAULT_NODE.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), 0L)); + toPbj(DEFAULT_NODE), toPbjAccount(DEFAULT_NODE.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), 0L)); destination.put( - EntityNumVirtualKey.fromLong(DEFAULT_PAYER.getAccountNum()), + toPbj(DEFAULT_PAYER), toPbjAccount(DEFAULT_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(STAKING_FUND.getAccountNum()), + toPbj(STAKING_FUND), toPbjAccount(STAKING_FUND.getAccountNum(), toPbj(asKeyUnchecked(EMPTY_KEY)), 0L)); destination.put( - EntityNumVirtualKey.fromLong(MASTER_PAYER.getAccountNum()), + toPbj(MASTER_PAYER), toPbjAccount(MASTER_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(TREASURY_PAYER.getAccountNum()), + toPbj(TREASURY_PAYER), toPbjAccount(TREASURY_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(NO_RECEIVER_SIG.getAccountNum()), + toPbj(NO_RECEIVER_SIG), toPbjAccount(NO_RECEIVER_SIG.getAccountNum(), NO_RECEIVER_SIG_KT.asPbjKey(), 
DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(RECEIVER_SIG.getAccountNum()), + toPbj(RECEIVER_SIG), toPbjAccount(RECEIVER_SIG.getAccountNum(), RECEIVER_SIG_KT.asPbjKey(), DEFAULT_BALANCE, true)); destination.put( - EntityNumVirtualKey.fromLong(SYS_ACCOUNT.getAccountNum()), + toPbj(SYS_ACCOUNT), toPbjAccount(SYS_ACCOUNT.getAccountNum(), SYS_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(MISC_ACCOUNT.getAccountNum()), + toPbj(MISC_ACCOUNT), toPbjAccount(MISC_ACCOUNT.getAccountNum(), MISC_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(CUSTOM_PAYER_ACCOUNT.getAccountNum()), + toPbj(CUSTOM_PAYER_ACCOUNT), toPbjAccount( CUSTOM_PAYER_ACCOUNT.getAccountNum(), CUSTOM_PAYER_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(OWNER_ACCOUNT.getAccountNum()), + toPbj(OWNER_ACCOUNT), toPbjAccount( OWNER_ACCOUNT.getAccountNum(), OWNER_ACCOUNT_KT.asPbjKey(), @@ -278,7 +276,7 @@ public static Map wellKnownAccountStoreAt() { List.of(fungibleTokenAllowances), List.of(nftAllowances))); destination.put( - EntityNumVirtualKey.fromLong(DELEGATING_SPENDER.getAccountNum()), + toPbj(DELEGATING_SPENDER), toPbjAccount( DELEGATING_SPENDER.getAccountNum(), DELEGATING_SPENDER_KT.asPbjKey(), @@ -288,20 +286,20 @@ public static Map wellKnownAccountStoreAt() { List.of(fungibleTokenAllowances), List.of(nftAllowances))); destination.put( - EntityNumVirtualKey.fromLong(COMPLEX_KEY_ACCOUNT.getAccountNum()), + toPbj(COMPLEX_KEY_ACCOUNT), toPbjAccount( COMPLEX_KEY_ACCOUNT.getAccountNum(), COMPLEX_KEY_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(TOKEN_TREASURY.getAccountNum()), + toPbj(TOKEN_TREASURY), toPbjAccount(TOKEN_TREASURY.getAccountNum(), TOKEN_TREASURY_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(DILIGENT_SIGNING_PAYER.getAccountNum()), + toPbj(DILIGENT_SIGNING_PAYER), toPbjAccount( DILIGENT_SIGNING_PAYER.getAccountNum(), DILIGENT_SIGNING_PAYER_KT.asPbjKey(), DEFAULT_BALANCE)); destination.put( - EntityNumVirtualKey.fromLong(FROM_OVERLAP_PAYER.getAccountNum()), + toPbj(FROM_OVERLAP_PAYER), toPbjAccount( FROM_OVERLAP_PAYER.getAccountNum(), FROM_OVERLAP_PAYER_KT.asPbjKey(), DEFAULT_BALANCE)); return destination; diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/TransactionProcessor.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/TransactionProcessor.java index 07061a44dcfc..438fce28d019 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/TransactionProcessor.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/TransactionProcessor.java @@ -17,21 +17,23 @@ package com.hedera.node.app.service.contract.impl.exec; import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.isEvmAddress; import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.pbjToBesuAddress; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static java.util.Objects.requireNonNull; import com.hedera.node.app.service.contract.impl.exec.gas.CustomGasCharging; import 
com.hedera.node.app.service.contract.impl.exec.processors.CustomMessageCallProcessor; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameBuilder; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameRunner; import com.hedera.node.app.service.contract.impl.hevm.*; import com.hedera.node.app.service.contract.impl.state.HederaEvmAccount; import com.hedera.node.app.spi.workflows.HandleException; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.Objects; import org.hyperledger.besu.datatypes.Address; import org.hyperledger.besu.evm.processor.ContractCreationProcessor; -import org.hyperledger.besu.evm.tracing.OperationTracer; /** * Modeled after the Besu {@code MainnetTransactionProcessor}, so that all four HAPI @@ -39,66 +41,94 @@ * {@code ContractCallLocal}) can reduce to a single code path. */ public class TransactionProcessor { - public static final String CONFIG_CONTEXT_VARIABLE = "contractsConfig"; - + private final FrameBuilder frameBuilder; + private final FrameRunner frameRunner; private final CustomGasCharging gasCharging; - private final CustomMessageCallProcessor messageCallProcessor; - private final ContractCreationProcessor contractCreationProcessor; + private final CustomMessageCallProcessor messageCall; + private final ContractCreationProcessor contractCreation; public TransactionProcessor( + @NonNull final FrameBuilder frameBuilder, + @NonNull final FrameRunner frameRunner, @NonNull final CustomGasCharging gasCharging, - @NonNull final CustomMessageCallProcessor messageCallProcessor, - @NonNull final ContractCreationProcessor contractCreationProcessor) { - this.gasCharging = Objects.requireNonNull(gasCharging); - this.messageCallProcessor = Objects.requireNonNull(messageCallProcessor); - this.contractCreationProcessor = Objects.requireNonNull(contractCreationProcessor); + @NonNull final CustomMessageCallProcessor messageCall, + @NonNull final ContractCreationProcessor contractCreation) { + this.frameBuilder = requireNonNull(frameBuilder); + this.frameRunner = requireNonNull(frameRunner); + this.gasCharging = requireNonNull(gasCharging); + this.messageCall = requireNonNull(messageCall); + this.contractCreation = requireNonNull(contractCreation); } public HederaEvmTransactionResult processTransaction( @NonNull final HederaEvmTransaction transaction, - @NonNull final HederaWorldUpdater worldUpdater, + @NonNull final HederaWorldUpdater updater, @NonNull final HederaEvmContext context, - @NonNull final OperationTracer tracer, + @NonNull final HederaTracer tracer, @NonNull final Configuration config) { try { - final var initialCall = computeInitialCall(transaction, worldUpdater, context, config); - // TODO - use CustomGasCharging when finished + // Compute the sender, relayer, and to address (will throw if invalid) + final var parties = computeInvolvedParties(transaction, updater, config); + if (transaction.isEthereumTransaction()) { + parties.sender().incrementNonce(); + } + + // Charge gas and return intrinsic gas and relayer allowance used (will throw on failure) + final var gasCharges = + gasCharging.chargeForGas(parties.sender(), parties.relayer(), context, updater, transaction); + + // Build the initial frame for the transaction + final var initialFrame = frameBuilder.buildInitialFrameWith( + transaction, + updater, + context, + config, + parties.sender().getAddress(), + parties.toAddress(), + gasCharges.intrinsicGas()); + // Return the result 
of running the frame to completion + return frameRunner.runToCompletion( + transaction.gasLimit(), initialFrame, tracer, messageCall, contractCreation); } catch (final HandleException failure) { return HederaEvmTransactionResult.abortFor(failure.getStatus()); } - throw new AssertionError("Not implemented"); } - private record InitialCall( + private record InvolvedParties( @NonNull HederaEvmAccount sender, @Nullable HederaEvmAccount relayer, @NonNull Address toAddress) {} - private InitialCall computeInitialCall( + private InvolvedParties computeInvolvedParties( @NonNull final HederaEvmTransaction transaction, @NonNull final HederaWorldUpdater worldUpdater, - @NonNull final HederaEvmContext context, @NonNull final Configuration config) { final var sender = worldUpdater.getHederaAccount(transaction.senderId()); validateTrue(sender != null, INVALID_ACCOUNT_ID); + HederaEvmAccount relayer = null; + if (transaction.isEthereumTransaction()) { + relayer = worldUpdater.getHederaAccount(requireNonNull(transaction.relayerId())); + validateTrue(relayer != null, INVALID_ACCOUNT_ID); + } if (transaction.isCreate()) { throw new AssertionError("Not implemented"); } else { final var to = worldUpdater.getHederaAccount(transaction.contractIdOrThrow()); if (maybeLazyCreate(transaction, to, config)) { validateTrue(transaction.hasValue(), INVALID_CONTRACT_ID); - final var evmAddress = transaction.contractIdOrThrow().evmAddressOrThrow(); - return new InitialCall(sender, null, pbjToBesuAddress(evmAddress)); + final var alias = transaction.contractIdOrThrow().evmAddressOrThrow(); + validateTrue(isEvmAddress(alias), INVALID_CONTRACT_ID); + return new InvolvedParties(sender, relayer, pbjToBesuAddress(alias)); + } else { + validateTrue(to != null, INVALID_CONTRACT_ID); + return new InvolvedParties(sender, relayer, requireNonNull(to).getAddress()); } } - throw new AssertionError("Not implemented"); } private boolean maybeLazyCreate( @NonNull final HederaEvmTransaction transaction, @Nullable final HederaEvmAccount to, @NonNull final Configuration config) { - return to == null - && transaction.isEthereumTransaction() - && messageCallProcessor.isImplicitCreationEnabled(config); + return to == null && transaction.isEthereumTransaction() && messageCall.isImplicitCreationEnabled(config); } } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/CustomGasCharging.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/CustomGasCharging.java index f1edbbf5d4eb..fd0045d074aa 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/CustomGasCharging.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/CustomGasCharging.java @@ -62,25 +62,27 @@ public CustomGasCharging(@NonNull final GasCalculator gasCalculator) { * @param context the context of the transaction, including the network gas price * @param worldUpdater the world updater for the transaction * @param transaction the transaction to charge gas for - * @return the amount charged to the relayer, if any + * @return the result of the gas charging * @throws HandleException if the gas charging fails for any reason */ - public long chargeForGas( + public GasCharges chargeForGas( @NonNull final HederaEvmAccount sender, @Nullable final HederaEvmAccount relayer, @NonNull final HederaEvmContext context, @NonNull 
final HederaWorldUpdater worldUpdater, @NonNull final HederaEvmTransaction transaction) { if (context.staticCall()) { - return 0L; + return new GasCharges(0L, 0L); } final var intrinsicGas = gasCalculator.transactionIntrinsicGasCost(Bytes.EMPTY, transaction.isCreate()); validateTrue(transaction.gasLimit() >= intrinsicGas, INSUFFICIENT_GAS); if (transaction.isEthereumTransaction()) { - return chargeWithRelayer(sender, requireNonNull(relayer), context, worldUpdater, transaction); + final var allowanceUsed = + chargeWithRelayer(sender, requireNonNull(relayer), context, worldUpdater, transaction); + return new GasCharges(intrinsicGas, allowanceUsed); } else { chargeWithOnlySender(sender, context, worldUpdater, transaction); - return 0L; + return new GasCharges(intrinsicGas, 0L); } } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/GasChargingResult.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/GasCharges.java similarity index 68% rename from hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/GasChargingResult.java rename to hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/GasCharges.java index c4f84d944c91..cfc2af41d4c9 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/GasChargingResult.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/GasCharges.java @@ -16,9 +16,4 @@ package com.hedera.node.app.service.contract.impl.exec.gas; -import com.hedera.node.app.service.contract.impl.state.HederaEvmAccount; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - -public record GasChargingResult( - @NonNull HederaEvmAccount sender, @Nullable HederaEvmAccount relayer, long allowanceUsed) {} +public record GasCharges(long intrinsicGas, long relayerAllowanceUsed) {} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/operations/CustomCreate2Operation.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/operations/CustomCreate2Operation.java index 95f6c60ffddb..10f4fc2aaaa1 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/operations/CustomCreate2Operation.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/operations/CustomCreate2Operation.java @@ -60,7 +60,7 @@ protected long cost(@NonNull final MessageFrame frame) { if (updater.isHollowAccount(alias) && !featureFlags.isImplicitCreationEnabled(frame)) { return null; } - updater.setupCreate2(frame.getRecipientAddress(), alias); + updater.setupAliasedCreate(frame.getRecipientAddress(), alias); frame.warmUpAddress(alias); return alias; } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameBuilder.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameBuilder.java new file mode 100644 index 000000000000..c487e1275583 --- /dev/null +++ 
b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameBuilder.java @@ -0,0 +1,135 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.contract.impl.exec.utils; + +import static com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils.CONFIG_CONTEXT_VARIABLE; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.asLongZeroAddress; + +import com.hedera.node.app.service.contract.impl.hevm.HederaEvmContext; +import com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransaction; +import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater; +import com.hedera.node.config.data.LedgerConfig; +import com.swirlds.config.api.Configuration; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayDeque; +import java.util.Map; +import javax.inject.Inject; +import javax.inject.Singleton; +import org.apache.tuweni.bytes.Bytes; +import org.hyperledger.besu.datatypes.Address; +import org.hyperledger.besu.datatypes.Wei; +import org.hyperledger.besu.evm.Code; +import org.hyperledger.besu.evm.code.CodeFactory; +import org.hyperledger.besu.evm.frame.MessageFrame; + +/** + * Infrastructure component that builds the initial {@link MessageFrame} instance for a transaction. + * This includes tasks like, + *

<ol>
+ *   <li>Putting the {@link Configuration} in the frame context variables.</li>
+ *   <li>Setting the gas price and block values from the {@link HederaEvmContext}.</li>
+ *   <li>Setting input data and code based on the message call type.</li>
+ * </ol>
+ */ +@Singleton +public class FrameBuilder { + private static final int MAX_STACK_SIZE = 1024; + + @Inject + public FrameBuilder() { + // Dagger2 + } + + /** + * Builds the initial {@link MessageFrame} instance for a transaction. + * + * @param transaction the transaction + * @param worldUpdater the world updater for the transaction + * @param context the Hedera EVM context (gas price, block values, etc.) + * @param config the active Hedera configuration + * @param from the sender of the transaction + * @param to the recipient of the transaction + * @param intrinsicGas the intrinsic gas cost, needed to calculate remaining gas + * @return the initial frame + */ + @SuppressWarnings("java:S107") + public MessageFrame buildInitialFrameWith( + @NonNull final HederaEvmTransaction transaction, + @NonNull final HederaWorldUpdater worldUpdater, + @NonNull final HederaEvmContext context, + @NonNull final Configuration config, + @NonNull final Address from, + @NonNull final Address to, + final long intrinsicGas) { + final var value = transaction.weiValue(); + final var ledgerConfig = config.getConfigData(LedgerConfig.class); + final var nominalCoinbase = asLongZeroAddress(ledgerConfig.fundingAccount()); + final var builder = MessageFrame.builder() + .messageFrameStack(new ArrayDeque<>()) + .maxStackSize(MAX_STACK_SIZE) + .worldUpdater(worldUpdater.updater()) + .initialGas(transaction.gasAvailable(intrinsicGas)) + .originator(from) + .gasPrice(Wei.of(context.gasPrice())) + .sender(from) + .value(value) + .apparentValue(value) + .blockValues(context.blockValuesOf(transaction.gasLimit())) + .depth(0) + .completer(unused -> {}) + .isStatic(context.staticCall()) + .miningBeneficiary(nominalCoinbase) + .blockHashLookup(context.blocks()::blockHashOf) + .contextVariables(Map.of(CONFIG_CONTEXT_VARIABLE, config)); + if (transaction.isCreate()) { + return finishedAsCreate(to, builder, transaction); + } else { + return finishedAsCall(to, builder, context, transaction); + } + } + + private MessageFrame finishedAsCreate( + @NonNull final Address to, + @NonNull final MessageFrame.Builder builder, + @NonNull final HederaEvmTransaction transaction) { + return builder.type(MessageFrame.Type.CONTRACT_CREATION) + .address(to) + .contract(to) + .inputData(Bytes.EMPTY) + .code(CodeFactory.createCode(transaction.evmPayload(), 0, false)) + .build(); + } + + private MessageFrame finishedAsCall( + @NonNull final Address to, + @NonNull final MessageFrame.Builder builder, + @NonNull final HederaEvmContext context, + @NonNull final HederaEvmTransaction transaction) { + final Code code; + if (transaction.permitsMissingContract()) { + code = context.loadIfPresent(to); + } else { + code = context.load(to); + } + return builder.type(MessageFrame.Type.MESSAGE_CALL) + .address(to) + .contract(to) + .inputData(transaction.evmPayload()) + .code(code) + .build(); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameRunner.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameRunner.java new file mode 100644 index 000000000000..2c20f6579d5a --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameRunner.java @@ -0,0 +1,127 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.contract.impl.exec.utils; + +import static com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils.contractsConfigOf; +import static com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransactionResult.failureFrom; +import static com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransactionResult.successFrom; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.*; +import static java.util.Objects.requireNonNull; +import static org.hyperledger.besu.evm.frame.MessageFrame.State.COMPLETED_SUCCESS; + +import com.hedera.node.app.service.contract.impl.exec.gas.CustomGasCalculator; +import com.hedera.node.app.service.contract.impl.exec.processors.CustomMessageCallProcessor; +import com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransactionResult; +import com.hedera.node.app.service.contract.impl.hevm.HederaTracer; +import com.hedera.node.app.service.contract.impl.state.ProxyWorldUpdater; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; +import javax.inject.Singleton; +import org.hyperledger.besu.evm.frame.MessageFrame; +import org.hyperledger.besu.evm.processor.ContractCreationProcessor; + +/** + * An infrastructure service that runs the EVM transaction beginning with the given {@link MessageFrame} + * to completion and returns the result. + */ +@Singleton +public class FrameRunner { + private final CustomGasCalculator gasCalculator; + + @Inject + public FrameRunner(@NonNull final CustomGasCalculator gasCalculator) { + this.gasCalculator = gasCalculator; + } + + /** + * Runs the EVM transaction implied by the given {@link MessageFrame} to completion using the provided + * {@link org.hyperledger.besu.evm.processor.AbstractMessageProcessor} implementations, and returns the result. + * + * @param gasLimit the gas limit for the transaction + * @param frame the frame to run + * @param tracer the tracer to use + * @param messageCall the message call processor to use + * @param contractCreation the contract creation processor to use + * @return the result of the transaction + */ + public HederaEvmTransactionResult runToCompletion( + final long gasLimit, + @NonNull final MessageFrame frame, + @NonNull final HederaTracer tracer, + @NonNull final CustomMessageCallProcessor messageCall, + @NonNull final ContractCreationProcessor contractCreation) { + requireAllNonNull(frame, tracer, messageCall, contractCreation); + + // We compute the Hedera id up front because the called contract could + // selfdestruct, preventing us from looking up its contract id later on + final var recipientAddress = frame.getRecipientAddress(); + final var recipientEvmAddress = asEvmContractId(recipientAddress); + final var recipientId = isLongZero(recipientAddress) + ? 
asNumberedContractId(recipientAddress) + : ((ProxyWorldUpdater) frame.getWorldUpdater()).getHederaContractId(recipientAddress); + + // Now run the transaction implied by the frame + tracer.initProcess(frame); + final var stack = frame.getMessageFrameStack(); + stack.addFirst(frame); + while (!stack.isEmpty()) { + runToCompletion(stack.peekFirst(), tracer, messageCall, contractCreation); + } + tracer.finalizeProcess(frame); + + // And package up its result + final var gasUsed = effectiveGasUsed(gasLimit, frame); + if (frame.getState() == COMPLETED_SUCCESS) { + return successFrom(gasUsed, recipientId, recipientEvmAddress, frame); + } else { + return failureFrom(gasUsed, frame); + } + } + + private void requireAllNonNull( + @NonNull final MessageFrame frame, + @NonNull final HederaTracer tracer, + @NonNull final CustomMessageCallProcessor messageCall, + @NonNull final ContractCreationProcessor contractCreation) { + requireNonNull(frame); + requireNonNull(tracer); + requireNonNull(messageCall); + requireNonNull(contractCreation); + } + + private void runToCompletion( + @NonNull final MessageFrame frame, + @NonNull final HederaTracer tracer, + @NonNull final CustomMessageCallProcessor messageCall, + @NonNull final ContractCreationProcessor contractCreation) { + final var executor = + switch (frame.getType()) { + case MESSAGE_CALL -> messageCall; + case CONTRACT_CREATION -> contractCreation; + }; + executor.process(frame, tracer); + } + + private long effectiveGasUsed(final long gasLimit, @NonNull final MessageFrame frame) { + var nominalUsed = gasLimit - frame.getRemainingGas(); + final var selfDestructRefund = gasCalculator.getSelfDestructRefundAmount() + * Math.min(frame.getSelfDestructs().size(), nominalUsed / gasCalculator.getMaxRefundQuotient()); + nominalUsed -= (selfDestructRefund + frame.getGasRefund()); + final var maxRefundPercent = contractsConfigOf(frame).maxRefundPercentOfGasLimit(); + return Math.max(nominalUsed, gasLimit - gasLimit * maxRefundPercent / 100); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameUtils.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameUtils.java index 5de18850eefe..6790836ab222 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameUtils.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/utils/FrameUtils.java @@ -16,22 +16,26 @@ package com.hedera.node.app.service.contract.impl.exec.utils; -import static com.hedera.node.app.service.contract.impl.exec.TransactionProcessor.CONFIG_CONTEXT_VARIABLE; import static java.util.Objects.requireNonNull; +import com.hedera.node.config.data.ContractsConfig; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.function.Predicate; import org.hyperledger.besu.evm.frame.MessageFrame; public class FrameUtils { + public static final String CONFIG_CONTEXT_VARIABLE = "contractsConfig"; + + private FrameUtils() { + throw new UnsupportedOperationException("Utility Class"); + } + public static @NonNull Configuration configOf(@NonNull final MessageFrame frame) { return requireNonNull(frame.getContextVariable(CONFIG_CONTEXT_VARIABLE)); } - public static boolean testConfigOf( - @NonNull final MessageFrame frame, @NonNull final Predicate test) { - return 
test.test(configOf(frame)); + public static @NonNull ContractsConfig contractsConfigOf(@NonNull final MessageFrame frame) { + return configOf(frame).getConfigData(ContractsConfig.class); } public static boolean isDelegateCall(@NonNull final MessageFrame frame) { diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v030/V030Module.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v030/V030Module.java index 6e56eeb26881..f9503d7a29fe 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v030/V030Module.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v030/V030Module.java @@ -32,6 +32,8 @@ import com.hedera.node.app.service.contract.impl.exec.operations.CustomCreateOperation; import com.hedera.node.app.service.contract.impl.exec.processors.CustomContractCreationProcessor; import com.hedera.node.app.service.contract.impl.exec.processors.CustomMessageCallProcessor; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameBuilder; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameRunner; import dagger.Binds; import dagger.Module; import dagger.Provides; @@ -67,10 +69,13 @@ public interface V030Module { @Singleton @ServicesV030 static TransactionProcessor provideTransactionProcessor( + @NonNull final FrameBuilder frameBuilder, + @NonNull final FrameRunner frameRunner, @ServicesV030 @NonNull final CustomMessageCallProcessor messageCallProcessor, @ServicesV030 @NonNull final ContractCreationProcessor contractCreationProcessor, @NonNull final CustomGasCharging gasCharging) { - return new TransactionProcessor(gasCharging, messageCallProcessor, contractCreationProcessor); + return new TransactionProcessor( + frameBuilder, frameRunner, gasCharging, messageCallProcessor, contractCreationProcessor); } @Provides diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v034/V034Module.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v034/V034Module.java index 4060b3f31445..eeeb09da06b1 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v034/V034Module.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v034/V034Module.java @@ -32,6 +32,8 @@ import com.hedera.node.app.service.contract.impl.exec.operations.CustomCreateOperation; import com.hedera.node.app.service.contract.impl.exec.processors.CustomContractCreationProcessor; import com.hedera.node.app.service.contract.impl.exec.processors.CustomMessageCallProcessor; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameBuilder; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameRunner; import com.hedera.node.app.service.contract.impl.exec.v030.Version030AddressChecks; import dagger.Binds; import dagger.Module; @@ -68,10 +70,13 @@ public interface V034Module { @Singleton @ServicesV034 static TransactionProcessor provideTransactionProcessor( + @NonNull final FrameBuilder frameBuilder, + @NonNull final FrameRunner frameRunner, @ServicesV034 @NonNull final CustomMessageCallProcessor messageCallProcessor, @ServicesV034 @NonNull final ContractCreationProcessor contractCreationProcessor, 
@NonNull final CustomGasCharging gasCharging) { - return new TransactionProcessor(gasCharging, messageCallProcessor, contractCreationProcessor); + return new TransactionProcessor( + frameBuilder, frameRunner, gasCharging, messageCallProcessor, contractCreationProcessor); } @Provides diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v038/V038Module.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v038/V038Module.java index ffe94df59725..c87caf4695cc 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v038/V038Module.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/v038/V038Module.java @@ -32,6 +32,8 @@ import com.hedera.node.app.service.contract.impl.exec.operations.CustomCreateOperation; import com.hedera.node.app.service.contract.impl.exec.processors.CustomContractCreationProcessor; import com.hedera.node.app.service.contract.impl.exec.processors.CustomMessageCallProcessor; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameBuilder; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameRunner; import com.hedera.node.app.service.contract.impl.exec.v034.Version034FeatureFlags; import dagger.Binds; import dagger.Module; @@ -69,10 +71,13 @@ public interface V038Module { @Singleton @ServicesV038 static TransactionProcessor provideTransactionProcessor( + @NonNull final FrameBuilder frameBuilder, + @NonNull final FrameRunner frameRunner, @ServicesV038 @NonNull final CustomMessageCallProcessor messageCallProcessor, @ServicesV038 @NonNull final ContractCreationProcessor contractCreationProcessor, @NonNull final CustomGasCharging gasCharging) { - return new TransactionProcessor(gasCharging, messageCallProcessor, contractCreationProcessor); + return new TransactionProcessor( + frameBuilder, frameRunner, gasCharging, messageCallProcessor, contractCreationProcessor); } @Provides diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/config/GenesisUsage.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmCode.java similarity index 50% rename from hedera-node/hedera-app/src/main/java/com/hedera/node/app/config/GenesisUsage.java rename to hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmCode.java index 610ad7012474..ab9e38c4a670 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/config/GenesisUsage.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmCode.java @@ -14,24 +14,20 @@ * limitations under the License. */ -package com.hedera.node.app.config; +package com.hedera.node.app.service.contract.impl.hevm; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; -import javax.inject.Qualifier; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.hyperledger.besu.datatypes.Address; +import org.hyperledger.besu.evm.Code; +import org.hyperledger.besu.evm.account.Account; /** - * Qualifier for Dagger to define the boolean value of whether the node is a genesis node. 
+ * TODO - not sure this makes sense, why can't we just call {@link Account#getCode()} directly? * - * @deprecated we need to define the correct workflow to define that genesis is used + * (Answer: we probably can. This class should be deleted in upcoming PR.) */ -@Deprecated(forRemoval = true) -@Target({ElementType.METHOD, ElementType.PARAMETER}) -@Qualifier -@Retention(RUNTIME) -@Documented -public @interface GenesisUsage {} +public interface HederaEvmCode { + Code load(@NonNull Address contract); + + Code loadIfPresent(@NonNull Address contract); +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmContext.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmContext.java index 3370ee82f5ad..53d279233ee2 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmContext.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmContext.java @@ -16,4 +16,22 @@ package com.hedera.node.app.service.contract.impl.hevm; -public record HederaEvmContext(long gasPrice, boolean staticCall, HederaEvmBlocks blocks) {} +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import org.hyperledger.besu.datatypes.Address; +import org.hyperledger.besu.evm.Code; +import org.hyperledger.besu.evm.frame.BlockValues; + +public record HederaEvmContext(long gasPrice, boolean staticCall, HederaEvmCode code, HederaEvmBlocks blocks) { + public Code load(@NonNull final Address contract) { + return code.load(Objects.requireNonNull(contract)); + } + + public Code loadIfPresent(@NonNull final Address contract) { + return code.loadIfPresent(Objects.requireNonNull(contract)); + } + + public BlockValues blockValuesOf(final long gasLimit) { + return blocks.blockValuesOf(gasLimit); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransaction.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransaction.java index e1a72f40b044..7a2a383bdbaf 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransaction.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransaction.java @@ -16,19 +16,22 @@ package com.hedera.node.app.service.contract.impl.hevm; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.pbjToTuweniBytes; + import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ContractID; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Objects; +import org.hyperledger.besu.datatypes.Wei; public record HederaEvmTransaction( @NonNull AccountID senderId, @Nullable AccountID relayerId, @Nullable ContractID contractId, long nonce, - @NonNull Bytes callData, + @NonNull Bytes payload, @Nullable Bytes chainId, long value, long gasLimit, @@ -42,6 +45,10 @@ public boolean isEthereumTransaction() { return relayerId != null; } + public boolean permitsMissingContract() { + return isEthereumTransaction() && hasValue(); + } + public @NonNull ContractID 
contractIdOrThrow() { return Objects.requireNonNull(contractId); } @@ -50,6 +57,18 @@ public boolean hasValue() { return value > 0; } + public org.apache.tuweni.bytes.Bytes evmPayload() { + return pbjToTuweniBytes(payload); + } + + public Wei weiValue() { + return Wei.of(value); + } + + public long gasAvailable(final long intrinsicGas) { + return gasLimit - intrinsicGas; + } + public long upfrontCostGiven(final long gasPrice) { final var gasCost = gasCostGiven(gasPrice); return gasCost == Long.MAX_VALUE ? Long.MAX_VALUE : gasCost + value; diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionProcessor.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionProcessor.java index 5bcf59fe00e1..ee1559386553 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionProcessor.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionProcessor.java @@ -24,7 +24,6 @@ import java.util.Map; import javax.inject.Inject; import javax.inject.Singleton; -import org.hyperledger.besu.evm.tracing.OperationTracer; @Singleton public class HederaEvmTransactionProcessor { @@ -41,7 +40,7 @@ public HederaEvmTransactionResult process( @NonNull final HederaWorldUpdater worldUpdater, @NonNull final HederaEvmContext context, @NonNull final HederaEvmVersion version, - @NonNull final OperationTracer tracer, + @NonNull final HederaTracer tracer, @NonNull final Configuration config) { return requireNonNull(transactionProcessors.get(version)) .processTransaction( diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionResult.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionResult.java index 91e04c034962..947269b97d4a 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionResult.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaEvmTransactionResult.java @@ -16,21 +16,30 @@ package com.hedera.node.app.service.contract.impl.hevm; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.pbjLogsFrom; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.tuweniToPbjBytes; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.ContractID; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.contract.ContractLoginfo; +import com.hedera.node.app.service.contract.impl.utils.ConversionUtils; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; import java.util.List; +import org.hyperledger.besu.datatypes.Wei; +import org.hyperledger.besu.evm.frame.MessageFrame; +import org.hyperledger.besu.evm.log.Log; public record HederaEvmTransactionResult( long gasUsed, long gasPrice, - @Nullable Bytes recipient, + @Nullable ContractID recipientId, + @Nullable ContractID recipientEvmAddress, @NonNull Bytes output, - @Nullable Bytes haltReason, + @Nullable String haltReason, 
@Nullable ResponseCodeEnum abortReason, @Nullable Bytes revertReason, @NonNull List<ContractLoginfo> logs) { @@ -47,6 +56,67 @@ public record HederaEvmTransactionResult( * @return the result */ public static HederaEvmTransactionResult abortFor(@NonNull final ResponseCodeEnum reason) { - return new HederaEvmTransactionResult(0, 0, null, Bytes.EMPTY, null, reason, null, List.of()); + return new HederaEvmTransactionResult( + 0, 0, null, null, Bytes.EMPTY, null, reason, null, Collections.emptyList()); + } + + /** + * Create a result for a transaction that succeeded. + * + * @param gasUsed the gas used by the transaction + * @return the result + */ + public static HederaEvmTransactionResult successFrom( + final long gasUsed, + @NonNull final ContractID recipientId, + @NonNull final ContractID recipientEvmAddress, + @NonNull final MessageFrame frame) { + requireNonNull(frame); + return successFrom( + gasUsed, frame.getGasPrice(), recipientId, recipientEvmAddress, frame.getOutputData(), frame.getLogs()); + } + + public static HederaEvmTransactionResult successFrom( + final long gasUsed, + @NonNull final Wei gasPrice, + @NonNull final ContractID recipientId, + @NonNull final ContractID recipientEvmAddress, + @NonNull final org.apache.tuweni.bytes.Bytes output, + @NonNull final List<Log> logs) { + return new HederaEvmTransactionResult( + gasUsed, + requireNonNull(gasPrice).toLong(), + requireNonNull(recipientId), + requireNonNull(recipientEvmAddress), + tuweniToPbjBytes(requireNonNull(output)), + null, + null, + null, + pbjLogsFrom(requireNonNull(logs))); + } + + /** + * Create a result for a transaction that failed. + * + * @param gasUsed the gas used by the transaction + * @return the result + */ + public static HederaEvmTransactionResult failureFrom(final long gasUsed, @NonNull final MessageFrame frame) { + requireNonNull(frame); + + return new HederaEvmTransactionResult( + gasUsed, + frame.getGasPrice().toLong(), + null, + null, + Bytes.EMPTY, + frame.getExceptionalHaltReason().map(Object::toString).orElse(null), + null, + frame.getRevertReason().map(ConversionUtils::tuweniToPbjBytes).orElse(null), + Collections.emptyList()); + } + + public boolean isSuccess() { + return abortReason == null && revertReason == null && haltReason == null; } } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaTracer.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaTracer.java new file mode 100644 index 000000000000..3de204b19201 --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaTracer.java @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.hedera.node.app.service.contract.impl.hevm; + +import edu.umd.cs.findbugs.annotations.NonNull; +import org.hyperledger.besu.evm.frame.MessageFrame; +import org.hyperledger.besu.evm.tracing.OperationTracer; + +/** + * Placeholder for future Hedera-specific tracing implementation. + */ +public interface HederaTracer extends OperationTracer { + void initProcess(@NonNull MessageFrame frame); + + void finalizeProcess(@NonNull MessageFrame frame); +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaWorldUpdater.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaWorldUpdater.java index 2fd04831b257..4d46af605d4b 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaWorldUpdater.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/hevm/HederaWorldUpdater.java @@ -54,6 +54,16 @@ public interface HederaWorldUpdater extends WorldUpdater { @Nullable HederaEvmAccount getHederaAccount(@NonNull ContractID contractId); + /** + * Returns the {@code 0.0.X} Hedera contract id for the given address, including when + * the address is pending creation. + * + * @param address the address to get the id for + * @return the id of the account at the given address + * @throws IllegalArgumentException if the address has no corresponding contract id + */ + ContractID getHederaContractId(@NonNull Address address); + /** * Collects the given fee from the given account. The caller should have already * verified that the account exists and has sufficient balance to pay the fee, so @@ -126,16 +136,17 @@ Optional tryTransferFromContract( /** * Given the possibly zero address of the recipient of a {@code CONTRACT_CREATION} message, - * and the EIP-1014 address computed by an in-progress {@code CREATE2} operation, sets up the - * {@link PendingCreation} this {@link ProxyWorldUpdater} will use to complete the creation of - * the new account in {@link ProxyWorldUpdater#createAccount(Address, long, Wei)}. + * and either the canonical {@code CREATE1} address, or the EIP-1014 address computed by an + * in-progress {@code CREATE2} operation, sets up the {@link PendingCreation} this + * {@link ProxyWorldUpdater} will use to complete the creation of the new account in + * {@link ProxyWorldUpdater#createAccount(Address, long, Wei)}. * *

Does not return anything, as the {@code CREATE2} address is already known. * * @param receiver the address of the recipient of a {@code CONTRACT_CREATION} message, zero if a top-level message * @param alias the EIP-1014 address computed by an in-progress {@code CREATE2} operation */ - void setupCreate2(@NonNull Address receiver, @NonNull Address alias); + void setupAliasedCreate(@NonNull Address receiver, @NonNull Address alias); /** * Returns whether this address refers to a hollow account (i.e. a lazy-created account that diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/HederaEvmAccount.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/HederaEvmAccount.java index 7701dd7d4b36..54e9d9192537 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/HederaEvmAccount.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/HederaEvmAccount.java @@ -17,6 +17,7 @@ package com.hedera.node.app.service.contract.impl.state; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.ContractID; import edu.umd.cs.findbugs.annotations.NonNull; import org.hyperledger.besu.evm.account.MutableAccount; @@ -36,4 +37,12 @@ public interface HederaEvmAccount extends MutableAccount { */ @NonNull AccountID hederaId(); + + /** + * Returns the Hedera contract id for this account. + * + * @return the Hedera contract id, including if the account is a token facade + */ + @NonNull + ContractID hederaContractId(); } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/PendingCreation.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/PendingCreation.java index 5e3b4ff58dff..b7b88c107c91 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/PendingCreation.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/PendingCreation.java @@ -18,6 +18,7 @@ import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.aliasFrom; import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.isLongZero; +import static java.util.Objects.requireNonNull; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; @@ -25,6 +26,10 @@ import org.hyperledger.besu.datatypes.Address; public record PendingCreation(@NonNull Address address, long number, long parentNumber) { + public PendingCreation { + requireNonNull(address); + } + @Nullable public Bytes aliasIfApplicable() { return isLongZero(address) ? 
null : aliasFrom(address); diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyEvmAccount.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyEvmAccount.java index d07dcaaea708..79cd4f7f832b 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyEvmAccount.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyEvmAccount.java @@ -17,6 +17,7 @@ package com.hedera.node.app.service.contract.impl.state; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.ContractID; import edu.umd.cs.findbugs.annotations.NonNull; import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.units.bigints.UInt256; @@ -120,6 +121,11 @@ public boolean isTokenFacade() { return AccountID.newBuilder().accountNum(number).build(); } + @Override + public @NonNull ContractID hederaContractId() { + return ContractID.newBuilder().contractNum(number).build(); + } + /** * Returns the number of treasury titles held by this account. * diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyWorldUpdater.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyWorldUpdater.java index d8bcb8035a60..0b8ea9df0473 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyWorldUpdater.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/ProxyWorldUpdater.java @@ -120,6 +120,19 @@ public HederaEvmAccount getHederaAccount(@NonNull AccountID accountId) { return address == null ? 
null : (HederaEvmAccount) get(address); } + @Override + public ContractID getHederaContractId(@NonNull final Address address) { + // As an important special case, return the pending creation's contract ID if its address matches + if (pendingCreation != null && pendingCreation.address().equals(requireNonNull(address))) { + return ContractID.newBuilder().contractNum(pendingCreation.number()).build(); + } + final HederaEvmAccount account = (HederaEvmAccount) get(address); + if (account == null) { + throw new IllegalArgumentException("No contract pending or extant at " + address); + } + return account.hederaContractId(); + } + @Nullable @Override public HederaEvmAccount getHederaAccount(@NonNull ContractID contractId) { @@ -188,7 +201,7 @@ public Address setupCreate(@NonNull final Address receiver) { * {@inheritDoc} */ @Override - public void setupCreate2(@NonNull final Address receiver, @NonNull final Address alias) { + public void setupAliasedCreate(@NonNull final Address receiver, @NonNull final Address alias) { setupPendingCreation(receiver, alias); } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/TokenEvmAccount.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/TokenEvmAccount.java index 82117447158a..c0848943ec1c 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/TokenEvmAccount.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/state/TokenEvmAccount.java @@ -16,9 +16,11 @@ package com.hedera.node.app.service.contract.impl.state; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.numberOfLongZero; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.ContractID; import edu.umd.cs.findbugs.annotations.NonNull; import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.units.bigints.UInt256; @@ -122,7 +124,12 @@ public boolean isTokenFacade() { } @Override - public AccountID hederaId() { + public @NonNull AccountID hederaId() { throw new IllegalStateException("Token facade has no usable Hedera id"); } + + @Override + public @NonNull ContractID hederaContractId() { + return ContractID.newBuilder().contractNum(numberOfLongZero(address)).build(); + } } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/utils/ConversionUtils.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/utils/ConversionUtils.java index 0949da9ee3cf..5b29a69dba1c 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/utils/ConversionUtils.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/utils/ConversionUtils.java @@ -18,14 +18,20 @@ import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.ContractID; +import com.hedera.hapi.node.contract.ContractLoginfo; import com.hedera.node.app.spi.meta.bni.Dispatch; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.List; import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes32; import org.apache.tuweni.units.bigints.UInt256; import 
org.hyperledger.besu.datatypes.Address; import org.hyperledger.besu.datatypes.Hash; +import org.hyperledger.besu.evm.log.Log; +import org.hyperledger.besu.evm.log.LogsBloomFilter; public class ConversionUtils { public static final long EVM_ADDRESS_LENGTH_AS_LONG = 20L; @@ -33,6 +39,44 @@ public class ConversionUtils { public static final int EVM_ADDRESS_LENGTH_AS_INT = 20; public static final int NUM_LONG_ZEROS = 12; + private ConversionUtils() { + throw new UnsupportedOperationException("Utility Class"); + } + + /** + * Given a list of Besu {@link Log}s, converts them to a list of PBJ {@link ContractLoginfo}. + * + * @param logs the Besu {@link Log}s + * @return the PBJ {@link ContractLoginfo}s + */ + public static List<ContractLoginfo> pbjLogsFrom(@NonNull final List<Log> logs) { + final List<ContractLoginfo> pbjLogs = new ArrayList<>(); + for (final var log : logs) { + pbjLogs.add(pbjLogFrom(log)); + } + return pbjLogs; + } + + /** + * Given a Besu {@link Log}, converts it to a PBJ {@link ContractLoginfo}. + * + * @param log the Besu {@link Log} + * @return the PBJ {@link ContractLoginfo} + */ + public static ContractLoginfo pbjLogFrom(@NonNull final Log log) { + final var loggerNumber = numberOfLongZero(log.getLogger()); + final List<com.hedera.pbj.runtime.io.buffer.Bytes> loggedTopics = new ArrayList<>(); + for (final var topic : log.getTopics()) { + loggedTopics.add(tuweniToPbjBytes(topic)); + } + return ContractLoginfo.newBuilder() + .contractID(ContractID.newBuilder().contractNum(loggerNumber)) + .data(tuweniToPbjBytes(log.getData())) + .topic(loggedTopics) + .bloom(bloomFor(log)) + .build(); + } + /** * Given an EVM address (possibly long-zero), returns the number of the corresponding Hedera entity * within the given {@link Dispatch}; or {@link #MISSING_ENTITY_NUMBER} if the address is not long-zero @@ -111,6 +155,14 @@ public static Address asLongZeroAddress(final long number) { return com.hedera.pbj.runtime.io.buffer.Bytes.wrap(requireNonNull(bytes).toArrayUnsafe()); } + public static ContractID asEvmContractId(@NonNull final Address address) { + return ContractID.newBuilder().evmAddress(tuweniToPbjBytes(address)).build(); + } + + public static ContractID asNumberedContractId(@NonNull final Address address) { + return ContractID.newBuilder().contractNum(numberOfLongZero(address)).build(); + } + /** * Converts a PBJ bytes to Tuweni bytes.
* @@ -222,4 +274,9 @@ private static long longFrom( | (b7 & 0xFFL) << 8 | (b8 & 0xFFL); } + + private static com.hedera.pbj.runtime.io.buffer.Bytes bloomFor(@NonNull final Log log) { + return com.hedera.pbj.runtime.io.buffer.Bytes.wrap( + LogsBloomFilter.builder().insertLog(log).build().toArray()); + } } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java index c2c501a1d58a..4a2afffc8a6b 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java @@ -27,16 +27,20 @@ exports com.hedera.node.app.service.contract.impl.state to com.hedera.node.app.service.contract.impl.test, com.hedera.node.app; - exports com.hedera.node.app.service.contract.impl.utils to + + opens com.hedera.node.app.service.contract.impl.utils to com.hedera.node.app.service.contract.impl.test; + exports com.hedera.node.app.service.contract.impl.infra to com.hedera.node.app.service.contract.impl.test; exports com.hedera.node.app.service.contract.impl.exec.gas to com.hedera.node.app.service.contract.impl.test; exports com.hedera.node.app.service.contract.impl.exec.v030 to com.hedera.node.app.service.contract.impl.test; - exports com.hedera.node.app.service.contract.impl.exec.utils to + + opens com.hedera.node.app.service.contract.impl.exec.utils to com.hedera.node.app.service.contract.impl.test; + exports com.hedera.node.app.service.contract.impl.exec.failure to com.hedera.node.app.service.contract.impl.test; exports com.hedera.node.app.service.contract.impl.exec; @@ -48,4 +52,6 @@ com.hedera.node.app.service.contract.impl.test; exports com.hedera.node.app.service.contract.impl.exec.v038 to com.hedera.node.app.service.contract.impl.test; + exports com.hedera.node.app.service.contract.impl.utils; + exports com.hedera.node.app.service.contract.impl.exec.utils; } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/TestHelpers.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/TestHelpers.java index f4f5eb645a38..3c2b06110d66 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/TestHelpers.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/TestHelpers.java @@ -16,48 +16,80 @@ package com.hedera.node.app.service.contract.impl.test; -import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.tuweniToPbjBytes; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.*; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ContractID; import com.hedera.hapi.node.base.ResponseCodeEnum; -import com.hedera.node.app.service.contract.impl.hevm.HederaEvmBlocks; -import com.hedera.node.app.service.contract.impl.hevm.HederaEvmContext; -import com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransaction; +import com.hedera.node.app.service.contract.impl.exec.gas.GasCharges; +import com.hedera.node.app.service.contract.impl.hevm.*; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.pbj.runtime.io.buffer.Bytes; import 
edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.math.BigInteger; +import java.util.List; import java.util.Objects; import org.hyperledger.besu.datatypes.Address; +import org.hyperledger.besu.datatypes.Wei; +import org.hyperledger.besu.evm.Code; +import org.hyperledger.besu.evm.code.CodeFactory; +import org.hyperledger.besu.evm.log.Log; +import org.hyperledger.besu.evm.log.LogTopic; import org.hyperledger.besu.evm.operation.Operation; public class TestHelpers { + public static int HEDERA_MAX_REFUND_PERCENTAGE = 20; public static long REQUIRED_GAS = 123L; public static long NONCE = 678; public static long VALUE = 999_999; public static long INTRINSIC_GAS = 12_345; public static long GAS_LIMIT = 1_000_000; + public static long DEFAULT_COINBASE = 98; + public static long SOME_BLOCK_NO = 321321; public static long USER_OFFERED_GAS_PRICE = 666; public static long NETWORK_GAS_PRICE = 777; + public static long BESU_MAX_REFUND_QUOTIENT = 2; public static long MAX_GAS_ALLOWANCE = 666_666_666; public static Bytes CALL_DATA = Bytes.wrap(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}); + public static Bytes OUTPUT_DATA = Bytes.wrap(new byte[] {9, 8, 7, 6, 5, 4, 3, 2, 1}); + public static Bytes TOPIC = Bytes.wrap(new byte[] {11, 21, 31, 41, 51, 61, 71, 81, 91}); public static Bytes MAINNET_CHAIN_ID = Bytes.fromHex("0127"); public static AccountID SENDER_ID = AccountID.newBuilder().accountNum(1234).build(); public static AccountID RELAYER_ID = AccountID.newBuilder().accountNum(2345).build(); public static ContractID CALLED_CONTRACT_ID = ContractID.newBuilder().contractNum(666).build(); + public static ContractID INVALID_CONTRACT_ADDRESS = + ContractID.newBuilder().evmAddress(Bytes.wrap("abcdefg")).build(); public static Address SYSTEM_ADDRESS = Address.fromHexString(BigInteger.valueOf(750).toString(16)); public static Address HTS_PRECOMPILE_ADDRESS = Address.fromHexString("0x167"); public static Address NON_SYSTEM_LONG_ZERO_ADDRESS = Address.fromHexString("0x1234576890"); + public static org.apache.tuweni.bytes.Bytes SOME_REVERT_REASON = + org.apache.tuweni.bytes.Bytes.wrap("I prefer not to".getBytes()); + public static ContractID NON_SYSTEM_CONTRACT_ID = ContractID.newBuilder() + .contractNum(numberOfLongZero(NON_SYSTEM_LONG_ZERO_ADDRESS)) + .build(); public static Address EIP_1014_ADDRESS = Address.fromHexString("0x89abcdef89abcdef89abcdef89abcdef89abcdef"); public static ContractID CALLED_CONTRACT_EVM_ADDRESS = ContractID.newBuilder() .evmAddress(tuweniToPbjBytes(EIP_1014_ADDRESS)) .build(); + public static Code CONTRACT_CODE = CodeFactory.createCode(pbjToTuweniBytes(CALL_DATA), 0, false); + public static Log BESU_LOG = new Log( + NON_SYSTEM_LONG_ZERO_ADDRESS, + pbjToTuweniBytes(TestHelpers.CALL_DATA), + List.of(LogTopic.of(pbjToTuweniBytes(TestHelpers.TOPIC)))); + + public static GasCharges CHARGING_RESULT = new GasCharges(INTRINSIC_GAS, MAX_GAS_ALLOWANCE / 2); + public static HederaEvmTransactionResult SUCCESS_RESULT = HederaEvmTransactionResult.successFrom( + GAS_LIMIT / 2, + Wei.of(NETWORK_GAS_PRICE), + CALLED_CONTRACT_ID, + CALLED_CONTRACT_EVM_ADDRESS, + pbjToTuweniBytes(CALL_DATA), + List.of(BESU_LOG)); public static void assertSameResult( final Operation.OperationResult expected, final Operation.OperationResult actual) { @@ -116,13 +148,41 @@ public static HederaEvmTransaction wellKnownHapiCall( maxGasAllowance); } - public static HederaEvmContext wellKnownContextWith(@NonNull final HederaEvmBlocks blocks) { - return new 
HederaEvmContext(NETWORK_GAS_PRICE, false, blocks); + public static HederaEvmTransaction wellKnownHapiCreate() { + return wellKnownHapiCreate(null, VALUE, GAS_LIMIT, NETWORK_GAS_PRICE, 0); + } + + public static HederaEvmTransaction wellKnownRelayedHapiCreate() { + return wellKnownHapiCreate(RELAYER_ID, VALUE, GAS_LIMIT, USER_OFFERED_GAS_PRICE, MAX_GAS_ALLOWANCE); + } + + private static HederaEvmTransaction wellKnownHapiCreate( + @Nullable final AccountID relayer, + final long value, + final long gasLimit, + final long userGasPrice, + final long maxGasAllowance) { + return new HederaEvmTransaction( + SENDER_ID, + relayer, + null, + NONCE, + CALL_DATA, + MAINNET_CHAIN_ID, + value, + gasLimit, + userGasPrice, + maxGasAllowance); + } + + public static HederaEvmContext wellKnownContextWith( + @NonNull final HederaEvmCode code, @NonNull final HederaEvmBlocks blocks) { + return new HederaEvmContext(NETWORK_GAS_PRICE, false, code, blocks); } public static HederaEvmContext wellKnownContextWith( - @NonNull final HederaEvmBlocks blocks, final boolean staticCall) { - return new HederaEvmContext(NETWORK_GAS_PRICE, staticCall, blocks); + @NonNull final HederaEvmCode code, @NonNull final HederaEvmBlocks blocks, final boolean staticCall) { + return new HederaEvmContext(NETWORK_GAS_PRICE, staticCall, code, blocks); } public static void assertFailsWith(@NonNull final ResponseCodeEnum status, @NonNull final Runnable something) { diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/TransactionProcessorTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/TransactionProcessorTest.java index 3ee10ea8b9a9..840942136957 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/TransactionProcessorTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/TransactionProcessorTest.java @@ -19,21 +19,23 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.*; import static com.hedera.node.app.service.contract.impl.test.TestHelpers.*; import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.inOrder; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.service.contract.impl.exec.TransactionProcessor; import com.hedera.node.app.service.contract.impl.exec.gas.CustomGasCharging; import com.hedera.node.app.service.contract.impl.exec.processors.CustomMessageCallProcessor; -import com.hedera.node.app.service.contract.impl.hevm.HederaEvmBlocks; -import com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransaction; -import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameBuilder; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameRunner; +import com.hedera.node.app.service.contract.impl.hevm.*; import com.hedera.node.app.service.contract.impl.state.HederaEvmAccount; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; import org.hyperledger.besu.datatypes.Wei; +import org.hyperledger.besu.evm.frame.MessageFrame; import org.hyperledger.besu.evm.processor.ContractCreationProcessor; -import org.hyperledger.besu.evm.tracing.OperationTracer; import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -42,12 +44,24 @@ @ExtendWith(MockitoExtension.class) class TransactionProcessorTest { + @Mock + private MessageFrame initialFrame; + + @Mock + private FrameBuilder frameBuilder; + + @Mock + private FrameRunner frameRunner; + @Mock private CustomMessageCallProcessor messageCallProcessor; @Mock private ContractCreationProcessor contractCreationProcessor; + @Mock + private HederaEvmCode code; + @Mock private HederaEvmBlocks blocks; @@ -55,7 +69,7 @@ class TransactionProcessorTest { private HederaWorldUpdater worldUpdater; @Mock - private OperationTracer tracer; + private HederaTracer tracer; @Mock private Configuration config; @@ -64,7 +78,10 @@ class TransactionProcessorTest { private HederaEvmAccount senderAccount; @Mock - private HederaEvmAccount calledAccount; + private HederaEvmAccount relayerAccount; + + @Mock + private HederaEvmAccount receiverAccount; @Mock private CustomGasCharging gasCharging; @@ -73,7 +90,8 @@ class TransactionProcessorTest { @BeforeEach void setUp() { - subject = new TransactionProcessor(gasCharging, messageCallProcessor, contractCreationProcessor); + subject = new TransactionProcessor( + frameBuilder, frameRunner, gasCharging, messageCallProcessor, contractCreationProcessor); } @Test @@ -84,18 +102,99 @@ void abortsOnMissingSender() { @Test void lazyCreationAttemptWithNoValueFailsFast() { givenSenderAccount(); + givenRelayerAccount(); given(messageCallProcessor.isImplicitCreationEnabled(config)).willReturn(true); assertAbortsWith(wellKnownRelayedHapiCall(0), INVALID_CONTRACT_ID); } + @Test + void lazyCreationAttemptWithInvalidAddress() { + givenSenderAccount(); + givenRelayerAccount(); + final var invalidCreation = new HederaEvmTransaction( + SENDER_ID, + RELAYER_ID, + INVALID_CONTRACT_ADDRESS, + NONCE, + CALL_DATA, + MAINNET_CHAIN_ID, + VALUE, + GAS_LIMIT, + USER_OFFERED_GAS_PRICE, + MAX_GAS_ALLOWANCE); + given(messageCallProcessor.isImplicitCreationEnabled(config)).willReturn(true); + assertAbortsWith(invalidCreation, INVALID_CONTRACT_ID); + } + + @Test + void requiresEthTxToHaveNonNullRelayer() { + givenSenderAccount(); + assertAbortsWith(wellKnownRelayedHapiCall(0), INVALID_ACCOUNT_ID); + } + + @Test + void nonLazyCreateCandidateMustHaveReceiver() { + givenSenderAccount(); + givenRelayerAccount(); + assertAbortsWith(wellKnownRelayedHapiCall(0), INVALID_CONTRACT_ID); + } + + @Test + @SuppressWarnings("unchecked") + void ethCallHappyPathAsExpected() { + final var inOrder = inOrder(frameBuilder, frameRunner, gasCharging, messageCallProcessor, senderAccount); + + givenSenderAccount(); + givenRelayerAccount(); + givenReceiverAccount(); + + final var context = wellKnownContextWith(code, blocks); + final var transaction = wellKnownRelayedHapiCall(0); + + given(gasCharging.chargeForGas(senderAccount, relayerAccount, context, worldUpdater, transaction)) + .willReturn(CHARGING_RESULT); + given(senderAccount.getAddress()).willReturn(EIP_1014_ADDRESS); + given(receiverAccount.getAddress()).willReturn(NON_SYSTEM_LONG_ZERO_ADDRESS); + given(frameBuilder.buildInitialFrameWith( + eq(transaction), + eq(worldUpdater), + eq(context), + eq(config), + eq(EIP_1014_ADDRESS), + eq(NON_SYSTEM_LONG_ZERO_ADDRESS), + eq(CHARGING_RESULT.intrinsicGas()))) + .willReturn(initialFrame); + given(frameRunner.runToCompletion( + eq(transaction.gasLimit()), eq(initialFrame), eq(tracer), any(), eq(contractCreationProcessor))) + .willReturn(SUCCESS_RESULT); + + final var result 
= subject.processTransaction(transaction, worldUpdater, context, tracer, config); + + inOrder.verify(senderAccount).incrementNonce(); + inOrder.verify(gasCharging).chargeForGas(senderAccount, relayerAccount, context, worldUpdater, transaction); + inOrder.verify(frameBuilder) + .buildInitialFrameWith( + eq(transaction), + eq(worldUpdater), + eq(context), + eq(config), + eq(EIP_1014_ADDRESS), + eq(NON_SYSTEM_LONG_ZERO_ADDRESS), + eq(CHARGING_RESULT.intrinsicGas())); + inOrder.verify(frameRunner) + .runToCompletion( + transaction.gasLimit(), initialFrame, tracer, messageCallProcessor, contractCreationProcessor); + assertSame(SUCCESS_RESULT, result); + } + private void assertAbortsWith(@NonNull final ResponseCodeEnum reason) { assertAbortsWith(wellKnownHapiCall(), reason); } private void assertAbortsWith( @NonNull final HederaEvmTransaction transaction, @NonNull final ResponseCodeEnum reason) { - final var result = - subject.processTransaction(transaction, worldUpdater, wellKnownContextWith(blocks), tracer, config); + final var result = subject.processTransaction( + transaction, worldUpdater, wellKnownContextWith(code, blocks), tracer, config); assertEquals(reason, result.abortReason()); } @@ -103,16 +202,20 @@ private void givenSenderAccount() { given(worldUpdater.getHederaAccount(SENDER_ID)).willReturn(senderAccount); } + private void givenRelayerAccount() { + given(worldUpdater.getHederaAccount(RELAYER_ID)).willReturn(relayerAccount); + } + private void givenSenderAccount(final long balance) { given(worldUpdater.getHederaAccount(SENDER_ID)).willReturn(senderAccount); given(senderAccount.getBalance()).willReturn(Wei.of(balance)); } private void givenReceiverAccount() { - given(worldUpdater.getHederaAccount(CALLED_CONTRACT_ID)).willReturn(calledAccount); + given(worldUpdater.getHederaAccount(CALLED_CONTRACT_ID)).willReturn(receiverAccount); } private void givenEvmReceiverAccount() { - given(worldUpdater.getHederaAccount(CALLED_CONTRACT_EVM_ADDRESS)).willReturn(calledAccount); + given(worldUpdater.getHederaAccount(CALLED_CONTRACT_EVM_ADDRESS)).willReturn(receiverAccount); } } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/gas/CustomGasChargingTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/gas/CustomGasChargingTest.java index 762bf5b30797..a646174e35cf 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/gas/CustomGasChargingTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/gas/CustomGasChargingTest.java @@ -25,6 +25,7 @@ import com.hedera.node.app.service.contract.impl.exec.gas.CustomGasCharging; import com.hedera.node.app.service.contract.impl.hevm.HederaEvmBlocks; +import com.hedera.node.app.service.contract.impl.hevm.HederaEvmCode; import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater; import com.hedera.node.app.service.contract.impl.state.HederaEvmAccount; import com.hedera.node.app.service.contract.impl.test.TestHelpers; @@ -45,6 +46,9 @@ class CustomGasChargingTest { @Mock private HederaEvmAccount relayer; + @Mock + private HederaEvmCode code; + @Mock private HederaEvmBlocks blocks; @@ -63,9 +67,9 @@ void setUp() { @Test void staticCallsDoNotChargeGas() { - final var allowanceCharged = subject.chargeForGas( - sender, relayer, wellKnownContextWith(blocks, true), 
worldUpdater, wellKnownHapiCall()); - assertEquals(0, allowanceCharged); + final var chargingResult = subject.chargeForGas( + sender, relayer, wellKnownContextWith(code, blocks, true), worldUpdater, wellKnownHapiCall()); + assertEquals(0, chargingResult.relayerAllowanceUsed()); verifyNoInteractions(gasCalculator); } @@ -77,7 +81,7 @@ void failsImmediatelyIfGasLimitBelowIntrinsicGas() { () -> subject.chargeForGas( sender, relayer, - wellKnownContextWith(blocks), + wellKnownContextWith(code, blocks), worldUpdater, wellKnownRelayedHapiCallWithGasLimit(TestHelpers.INTRINSIC_GAS - 1))); } @@ -89,7 +93,8 @@ void failsImmediatelyIfPayerBalanceBelowUpfrontCost() { given(sender.getBalance()).willReturn(Wei.of(transaction.upfrontCostGiven(NETWORK_GAS_PRICE) - 1)); assertFailsWith( INSUFFICIENT_PAYER_BALANCE, - () -> subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction)); + () -> subject.chargeForGas( + sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction)); } @Test @@ -98,9 +103,9 @@ void deductsGasCostIfUpfrontCostIsAfforded() { final var transaction = wellKnownHapiCall(); given(sender.hederaId()).willReturn(SENDER_ID); given(sender.getBalance()).willReturn(Wei.of(transaction.upfrontCostGiven(NETWORK_GAS_PRICE))); - final var allowanceCharged = - subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction); - assertEquals(0, allowanceCharged); + final var chargingResult = + subject.chargeForGas(sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction); + assertEquals(0, chargingResult.relayerAllowanceUsed()); verify(worldUpdater).collectFee(SENDER_ID, transaction.gasCostGiven(NETWORK_GAS_PRICE)); } @@ -111,7 +116,8 @@ void requiresSufficientGasAllowanceIfUserOfferedPriceIsZero() { final var transaction = wellKnownRelayedHapiCallWithUserGasPriceAndMaxAllowance(0, insufficientMaxAllowance); assertFailsWith( INSUFFICIENT_TX_FEE, - () -> subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction)); + () -> subject.chargeForGas( + sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction)); } @Test @@ -121,7 +127,8 @@ void requiresRelayerToHaveSufficientBalanceIfUserOfferedPriceIsZero() { given(relayer.getBalance()).willReturn(Wei.of(transaction.gasCostGiven(NETWORK_GAS_PRICE) - 1)); assertFailsWith( INSUFFICIENT_PAYER_BALANCE, - () -> subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction)); + () -> subject.chargeForGas( + sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction)); } @Test @@ -131,9 +138,9 @@ void chargesRelayerOnlyIfUserOfferedPriceIsZero() { final var gasCost = transaction.gasCostGiven(NETWORK_GAS_PRICE); given(relayer.getBalance()).willReturn(Wei.of(gasCost)); given(relayer.hederaId()).willReturn(RELAYER_ID); - final var allowanceCharged = - subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction); - assertEquals(gasCost, allowanceCharged); + final var chargingResult = + subject.chargeForGas(sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction); + assertEquals(gasCost, chargingResult.relayerAllowanceUsed()); verify(worldUpdater).collectFee(RELAYER_ID, gasCost); } @@ -144,9 +151,9 @@ void chargesSenderOnlyIfUserOfferedPriceIsAtLeastNetworkPrice() { final var gasCost = transaction.gasCostGiven(NETWORK_GAS_PRICE); given(sender.getBalance()).willReturn(Wei.of(gasCost)); 
given(sender.hederaId()).willReturn(SENDER_ID); - final var allowanceCharged = - subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction); - assertEquals(0, allowanceCharged); + final var chargingResult = + subject.chargeForGas(sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction); + assertEquals(0, chargingResult.relayerAllowanceUsed()); verify(worldUpdater).collectFee(SENDER_ID, gasCost); } @@ -158,7 +165,8 @@ void rejectsIfSenderCannotCoverOfferedGasCost() { given(sender.getBalance()).willReturn(Wei.of(transaction.offeredGasCost() - 1)); assertFailsWith( INSUFFICIENT_PAYER_BALANCE, - () -> subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction)); + () -> subject.chargeForGas( + sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction)); } @Test @@ -170,7 +178,8 @@ void rejectsIfRelayerCannotCoverRemainingGasCost() { given(relayer.getBalance()).willReturn(Wei.ZERO); assertFailsWith( INSUFFICIENT_PAYER_BALANCE, - () -> subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction)); + () -> subject.chargeForGas( + sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction)); } @Test @@ -179,7 +188,8 @@ void failsIfGasAllownaceLessThanRemainingGasCost() { final var transaction = wellKnownRelayedHapiCallWithUserGasPriceAndMaxAllowance(NETWORK_GAS_PRICE / 2, 0); assertFailsWith( INSUFFICIENT_TX_FEE, - () -> subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction)); + () -> subject.chargeForGas( + sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction)); } @Test @@ -193,9 +203,9 @@ void chargesSenderAndRelayerIfBothSolventAndWilling() { given(sender.hederaId()).willReturn(SENDER_ID); given(relayer.getBalance()).willReturn(Wei.of(gasCost)); given(relayer.hederaId()).willReturn(RELAYER_ID); - final var allowanceCharged = - subject.chargeForGas(sender, relayer, wellKnownContextWith(blocks), worldUpdater, transaction); - assertEquals(relayerGasCost, allowanceCharged); + final var chargingResult = + subject.chargeForGas(sender, relayer, wellKnownContextWith(code, blocks), worldUpdater, transaction); + assertEquals(relayerGasCost, chargingResult.relayerAllowanceUsed()); verify(worldUpdater).collectFee(SENDER_ID, transaction.offeredGasCost()); verify(worldUpdater).collectFee(RELAYER_ID, relayerGasCost); } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomChainIdOperationTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomChainIdOperationTest.java index 88d418003fc1..bf4ca740b9f3 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomChainIdOperationTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomChainIdOperationTest.java @@ -16,7 +16,7 @@ package com.hedera.node.app.service.contract.impl.test.exec.operations; -import static com.hedera.node.app.service.contract.impl.exec.TransactionProcessor.CONFIG_CONTEXT_VARIABLE; +import static com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils.CONFIG_CONTEXT_VARIABLE; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.verify; 
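For context on the CustomGasChargingTest changes above: the assertions now read the relayer allowance from a small result record rather than a bare long returned by chargeForGas. A minimal sketch of the shape those tests rely on, inferred from TestHelpers.CHARGING_RESULT and the intrinsicGas()/relayerAllowanceUsed() accessors exercised in the tests (the production GasCharges may carry more detail), is:

// Sketch only -- field order inferred from `new GasCharges(INTRINSIC_GAS, MAX_GAS_ALLOWANCE / 2)`
// and the `chargingResult.relayerAllowanceUsed()` / `CHARGING_RESULT.intrinsicGas()` calls above.
public record GasCharges(long intrinsicGas, long relayerAllowanceUsed) {}

// Callers read the relayer allowance from the returned record rather than a raw long, e.g.:
//   final var charges = customGasCharging.chargeForGas(sender, relayer, context, worldUpdater, transaction);
//   final long allowanceUsed = charges.relayerAllowanceUsed();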
diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomCreate2OperationTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomCreate2OperationTest.java index de95871015ba..9b10b85fb739 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomCreate2OperationTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/operations/CustomCreate2OperationTest.java @@ -70,7 +70,7 @@ void failsWhenPendingContractIsHollowAccountAndLazyCreationDisabled() { final var expected = new Operation.OperationResult(GAS_COST, null); assertSameResult(expected, subject.execute(frame, evm)); - verify(worldUpdater, never()).setupCreate2(RECIEVER_ADDRESS, EIP_1014_ADDRESS); + verify(worldUpdater, never()).setupAliasedCreate(RECIEVER_ADDRESS, EIP_1014_ADDRESS); verify(frame).popStackItems(4); verify(frame).pushStackItem(UInt256.ZERO); verify(featureFlags).isImplicitCreationEnabled(frame); @@ -92,7 +92,7 @@ void finalizesHollowAccountWhenPendingContractIsHollowAccountAndLazyCreationEnab final var expected = new Operation.OperationResult(GAS_COST, null); assertSameResult(expected, subject.execute(frame, evm)); - verify(worldUpdater).setupCreate2(RECIEVER_ADDRESS, EIP_1014_ADDRESS); + verify(worldUpdater).setupAliasedCreate(RECIEVER_ADDRESS, EIP_1014_ADDRESS); verify(stack).addFirst(frameCaptor.capture()); final var childFrame = frameCaptor.getValue(); diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameBuilderTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameBuilderTest.java new file mode 100644 index 000000000000..c2d0c35dbf13 --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameBuilderTest.java @@ -0,0 +1,182 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.contract.impl.test.exec.utils; + +import static com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils.configOf; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.*; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.asLongZeroAddress; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.BDDMockito.given; + +import com.hedera.node.app.service.contract.impl.exec.utils.FrameBuilder; +import com.hedera.node.app.service.contract.impl.hevm.HederaEvmBlocks; +import com.hedera.node.app.service.contract.impl.hevm.HederaEvmCode; +import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import org.apache.tuweni.bytes.Bytes; +import org.hyperledger.besu.datatypes.Hash; +import org.hyperledger.besu.datatypes.Wei; +import org.hyperledger.besu.evm.code.CodeFactory; +import org.hyperledger.besu.evm.frame.BlockValues; +import org.hyperledger.besu.evm.frame.MessageFrame; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class FrameBuilderTest { + @Mock + private BlockValues blockValues; + + @Mock + private HederaEvmCode code; + + @Mock + private HederaEvmBlocks blocks; + + @Mock + private HederaWorldUpdater worldUpdater; + + @Mock + private HederaWorldUpdater stackedUpdater; + + private final FrameBuilder subject = new FrameBuilder(); + + @Test + void constructsExpectedFrameForCallToExtantContract() { + final var transaction = wellKnownHapiCall(); + given(worldUpdater.updater()).willReturn(stackedUpdater); + given(blocks.blockValuesOf(GAS_LIMIT)).willReturn(blockValues); + given(blocks.blockHashOf(SOME_BLOCK_NO)).willReturn(Hash.EMPTY); + given(code.load(NON_SYSTEM_LONG_ZERO_ADDRESS)).willReturn(CONTRACT_CODE); + final var config = HederaTestConfigBuilder.create() + .withValue("ledger.fundingAccount", DEFAULT_COINBASE) + .getOrCreateConfig(); + + final var frame = subject.buildInitialFrameWith( + transaction, + worldUpdater, + wellKnownContextWith(code, blocks), + config, + EIP_1014_ADDRESS, + NON_SYSTEM_LONG_ZERO_ADDRESS, + INTRINSIC_GAS); + + assertEquals(1024, frame.getMaxStackSize()); + assertSame(stackedUpdater, frame.getWorldUpdater()); + assertEquals(transaction.gasAvailable(INTRINSIC_GAS), frame.getRemainingGas()); + assertSame(EIP_1014_ADDRESS, frame.getOriginatorAddress()); + assertEquals(Wei.of(NETWORK_GAS_PRICE), frame.getGasPrice()); + assertEquals(Wei.of(VALUE), frame.getValue()); + assertEquals(Wei.of(VALUE), frame.getApparentValue()); + assertSame(blockValues, frame.getBlockValues()); + assertFalse(frame.isStatic()); + assertEquals(asLongZeroAddress(DEFAULT_COINBASE), frame.getMiningBeneficiary()); + final var hashLookup = frame.getBlockHashLookup(); + assertEquals(Hash.EMPTY, hashLookup.apply(SOME_BLOCK_NO)); + assertSame(config, configOf(frame)); + assertDoesNotThrow(frame::notifyCompletion); + assertEquals(MessageFrame.Type.MESSAGE_CALL, frame.getType()); + assertEquals(NON_SYSTEM_LONG_ZERO_ADDRESS, frame.getRecipientAddress()); + assertEquals(NON_SYSTEM_LONG_ZERO_ADDRESS, frame.getContractAddress()); + assertEquals(transaction.evmPayload(), frame.getInputData()); + assertSame(CONTRACT_CODE, frame.getCode()); + } + + @Test + void constructsExpectedFrameForCallToMissingContract() { + final var transaction = 
wellKnownRelayedHapiCall(VALUE); + given(worldUpdater.updater()).willReturn(stackedUpdater); + given(blocks.blockValuesOf(GAS_LIMIT)).willReturn(blockValues); + given(blocks.blockHashOf(SOME_BLOCK_NO)).willReturn(Hash.EMPTY); + given(code.loadIfPresent(NON_SYSTEM_LONG_ZERO_ADDRESS)).willReturn(CONTRACT_CODE); + final var config = HederaTestConfigBuilder.create() + .withValue("ledger.fundingAccount", DEFAULT_COINBASE) + .getOrCreateConfig(); + + final var frame = subject.buildInitialFrameWith( + transaction, + worldUpdater, + wellKnownContextWith(code, blocks), + config, + EIP_1014_ADDRESS, + NON_SYSTEM_LONG_ZERO_ADDRESS, + INTRINSIC_GAS); + + assertEquals(1024, frame.getMaxStackSize()); + assertSame(stackedUpdater, frame.getWorldUpdater()); + assertEquals(transaction.gasAvailable(INTRINSIC_GAS), frame.getRemainingGas()); + assertSame(EIP_1014_ADDRESS, frame.getOriginatorAddress()); + assertEquals(Wei.of(NETWORK_GAS_PRICE), frame.getGasPrice()); + assertEquals(Wei.of(VALUE), frame.getValue()); + assertEquals(Wei.of(VALUE), frame.getApparentValue()); + assertSame(blockValues, frame.getBlockValues()); + assertFalse(frame.isStatic()); + assertEquals(asLongZeroAddress(DEFAULT_COINBASE), frame.getMiningBeneficiary()); + final var hashLookup = frame.getBlockHashLookup(); + assertEquals(Hash.EMPTY, hashLookup.apply(SOME_BLOCK_NO)); + assertSame(config, configOf(frame)); + assertDoesNotThrow(frame::notifyCompletion); + assertEquals(MessageFrame.Type.MESSAGE_CALL, frame.getType()); + assertEquals(NON_SYSTEM_LONG_ZERO_ADDRESS, frame.getRecipientAddress()); + assertEquals(NON_SYSTEM_LONG_ZERO_ADDRESS, frame.getContractAddress()); + assertEquals(transaction.evmPayload(), frame.getInputData()); + assertSame(CONTRACT_CODE, frame.getCode()); + } + + @Test + void constructsExpectedFrameForCreate() { + final var transaction = wellKnownHapiCreate(); + given(worldUpdater.updater()).willReturn(stackedUpdater); + given(blocks.blockValuesOf(GAS_LIMIT)).willReturn(blockValues); + given(blocks.blockHashOf(SOME_BLOCK_NO)).willReturn(Hash.EMPTY); + final var config = HederaTestConfigBuilder.create() + .withValue("ledger.fundingAccount", DEFAULT_COINBASE) + .getOrCreateConfig(); + final var expectedCode = CodeFactory.createCode(transaction.evmPayload(), 0, false); + + final var frame = subject.buildInitialFrameWith( + transaction, + worldUpdater, + wellKnownContextWith(code, blocks), + config, + EIP_1014_ADDRESS, + NON_SYSTEM_LONG_ZERO_ADDRESS, + INTRINSIC_GAS); + + assertEquals(1024, frame.getMaxStackSize()); + assertSame(stackedUpdater, frame.getWorldUpdater()); + assertEquals(transaction.gasAvailable(INTRINSIC_GAS), frame.getRemainingGas()); + assertSame(EIP_1014_ADDRESS, frame.getOriginatorAddress()); + assertEquals(Wei.of(NETWORK_GAS_PRICE), frame.getGasPrice()); + assertEquals(Wei.of(VALUE), frame.getValue()); + assertEquals(Wei.of(VALUE), frame.getApparentValue()); + assertSame(blockValues, frame.getBlockValues()); + assertFalse(frame.isStatic()); + assertEquals(asLongZeroAddress(DEFAULT_COINBASE), frame.getMiningBeneficiary()); + final var hashLookup = frame.getBlockHashLookup(); + assertEquals(Hash.EMPTY, hashLookup.apply(SOME_BLOCK_NO)); + assertSame(config, configOf(frame)); + assertDoesNotThrow(frame::notifyCompletion); + assertEquals(MessageFrame.Type.CONTRACT_CREATION, frame.getType()); + assertEquals(NON_SYSTEM_LONG_ZERO_ADDRESS, frame.getRecipientAddress()); + assertEquals(NON_SYSTEM_LONG_ZERO_ADDRESS, frame.getContractAddress()); + assertEquals(Bytes.EMPTY, frame.getInputData()); + 
assertEquals(expectedCode, frame.getCode()); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameRunnerTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameRunnerTest.java new file mode 100644 index 000000000000..8775678cf0c0 --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameRunnerTest.java @@ -0,0 +1,235 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.contract.impl.test.exec.utils; + +import static com.hedera.node.app.service.contract.impl.exec.failure.CustomExceptionalHaltReason.TOO_MANY_CHILD_RECORDS; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.*; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.*; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.doAnswer; + +import com.hedera.hapi.node.base.ContractID; +import com.hedera.node.app.service.contract.impl.exec.gas.CustomGasCalculator; +import com.hedera.node.app.service.contract.impl.exec.processors.CustomMessageCallProcessor; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameRunner; +import com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils; +import com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransactionResult; +import com.hedera.node.app.service.contract.impl.hevm.HederaTracer; +import com.hedera.node.app.service.contract.impl.state.ProxyWorldUpdater; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.*; +import org.hyperledger.besu.datatypes.Address; +import org.hyperledger.besu.datatypes.Wei; +import org.hyperledger.besu.evm.frame.MessageFrame; +import org.hyperledger.besu.evm.processor.ContractCreationProcessor; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class FrameRunnerTest { + @Mock + private MessageFrame frame; + + @Mock + private ProxyWorldUpdater worldUpdater; + + @Mock + private MessageFrame childFrame; + + @Mock + private HederaTracer tracer; + + @Mock + private CustomMessageCallProcessor messageCallProcessor; + + @Mock + private ContractCreationProcessor contractCreationProcessor; + + @Mock + private CustomGasCalculator gasCalculator; + + private FrameRunner subject; + + @BeforeEach + void setUp() { + subject = new FrameRunner(gasCalculator); + } + + @Test + void happyPathWorksWithEip1014Receiver() { + final var inOrder 
= Mockito.inOrder(frame, childFrame, tracer, messageCallProcessor, contractCreationProcessor); + + givenBaseSuccessWith(EIP_1014_ADDRESS); + given(frame.getWorldUpdater()).willReturn(worldUpdater); + given(worldUpdater.getHederaContractId(EIP_1014_ADDRESS)).willReturn(CALLED_CONTRACT_ID); + + final var result = + subject.runToCompletion(GAS_LIMIT, frame, tracer, messageCallProcessor, contractCreationProcessor); + + inOrder.verify(tracer).initProcess(frame); + inOrder.verify(contractCreationProcessor).process(frame, tracer); + inOrder.verify(messageCallProcessor).process(childFrame, tracer); + inOrder.verify(tracer).finalizeProcess(frame); + + assertTrue(result.isSuccess()); + assertEquals(expectedGasUsed(frame), result.gasUsed()); + assertEquals(pbjLogsFrom(List.of(BESU_LOG)), result.logs()); + assertEquals(CALLED_CONTRACT_ID, result.recipientId()); + assertEquals(CALLED_CONTRACT_EVM_ADDRESS, result.recipientEvmAddress()); + + assertSuccessExpectationsWith(CALLED_CONTRACT_ID, CALLED_CONTRACT_EVM_ADDRESS, frame, result); + } + + @Test + void happyPathWorksWithLongZeroReceiver() { + final var inOrder = Mockito.inOrder(frame, childFrame, tracer, messageCallProcessor, contractCreationProcessor); + + givenBaseSuccessWith(NON_SYSTEM_LONG_ZERO_ADDRESS); + + final var result = + subject.runToCompletion(GAS_LIMIT, frame, tracer, messageCallProcessor, contractCreationProcessor); + + inOrder.verify(tracer).initProcess(frame); + inOrder.verify(contractCreationProcessor).process(frame, tracer); + inOrder.verify(messageCallProcessor).process(childFrame, tracer); + inOrder.verify(tracer).finalizeProcess(frame); + + assertSuccessExpectationsWith( + NON_SYSTEM_CONTRACT_ID, asEvmContractId(NON_SYSTEM_LONG_ZERO_ADDRESS), frame, result); + } + + @Test + void failurePathWorksWithRevertReason() { + final var inOrder = Mockito.inOrder(frame, childFrame, tracer, messageCallProcessor, contractCreationProcessor); + + givenBaseFailureWith(NON_SYSTEM_LONG_ZERO_ADDRESS); + given(frame.getRevertReason()).willReturn(Optional.of(SOME_REVERT_REASON)); + + final var result = + subject.runToCompletion(GAS_LIMIT, frame, tracer, messageCallProcessor, contractCreationProcessor); + + inOrder.verify(tracer).initProcess(frame); + inOrder.verify(contractCreationProcessor).process(frame, tracer); + inOrder.verify(messageCallProcessor).process(childFrame, tracer); + inOrder.verify(tracer).finalizeProcess(frame); + + assertFailureExpectationsWith(frame, result); + assertEquals(tuweniToPbjBytes(SOME_REVERT_REASON), result.revertReason()); + assertNull(result.haltReason()); + } + + @Test + void failurePathWorksWithHaltReason() { + final var inOrder = Mockito.inOrder(frame, childFrame, tracer, messageCallProcessor, contractCreationProcessor); + + givenBaseFailureWith(NON_SYSTEM_LONG_ZERO_ADDRESS); + given(frame.getExceptionalHaltReason()).willReturn(Optional.of(TOO_MANY_CHILD_RECORDS)); + + final var result = + subject.runToCompletion(GAS_LIMIT, frame, tracer, messageCallProcessor, contractCreationProcessor); + + inOrder.verify(tracer).initProcess(frame); + inOrder.verify(contractCreationProcessor).process(frame, tracer); + inOrder.verify(messageCallProcessor).process(childFrame, tracer); + inOrder.verify(tracer).finalizeProcess(frame); + + assertFailureExpectationsWith(frame, result); + assertEquals(TOO_MANY_CHILD_RECORDS.toString(), result.haltReason()); + assertNull(result.revertReason()); + } + + private void assertSuccessExpectationsWith( + @NonNull final ContractID expectedReceiverId, + @NonNull final ContractID 
expectedReceiverAddress, + @NonNull final MessageFrame frame, + @NonNull final HederaEvmTransactionResult result) { + assertTrue(result.isSuccess()); + assertEquals(expectedGasUsed(frame), result.gasUsed()); + assertEquals(pbjLogsFrom(List.of(BESU_LOG)), result.logs()); + assertEquals(expectedReceiverId, result.recipientId()); + assertEquals(expectedReceiverAddress, result.recipientEvmAddress()); + assertEquals(OUTPUT_DATA, result.output()); + } + + private void assertFailureExpectationsWith( + @NonNull final MessageFrame frame, @NonNull final HederaEvmTransactionResult result) { + assertFalse(result.isSuccess()); + assertEquals(expectedGasUsed(frame), result.gasUsed()); + assertEquals(Bytes.EMPTY, result.output()); + } + + private void givenBaseSuccessWith(@NonNull final Address receiver) { + givenBaseScenarioWithDetails(receiver, true); + } + + private void givenBaseFailureWith(@NonNull final Address receiver) { + givenBaseScenarioWithDetails(receiver, false); + } + + private void givenBaseScenarioWithDetails(@NonNull final Address receiver, final boolean success) { + final Deque<MessageFrame> messageFrameStack = new ArrayDeque<>(); + given(frame.getType()).willReturn(MessageFrame.Type.CONTRACT_CREATION); + given(childFrame.getType()).willReturn(MessageFrame.Type.MESSAGE_CALL); + doAnswer(invocation -> { + messageFrameStack.pop(); + messageFrameStack.push(childFrame); + return null; + }) + .when(contractCreationProcessor) + .process(frame, tracer); + doAnswer(invocation -> { + messageFrameStack.pop(); + return null; + }) + .when(messageCallProcessor) + .process(childFrame, tracer); + given(gasCalculator.getSelfDestructRefundAmount()).willReturn(GAS_LIMIT / 32); + given(gasCalculator.getMaxRefundQuotient()).willReturn(BESU_MAX_REFUND_QUOTIENT); + given(frame.getRemainingGas()).willReturn(GAS_LIMIT / 2); + given(frame.getSelfDestructs()).willReturn(Set.of(EIP_1014_ADDRESS, NON_SYSTEM_LONG_ZERO_ADDRESS)); + given(frame.getGasRefund()).willReturn(GAS_LIMIT / 8); + final var config = HederaTestConfigBuilder.create() + .withValue("contracts.maxRefundPercentOfGasLimit", HEDERA_MAX_REFUND_PERCENTAGE) + .getOrCreateConfig(); + given(frame.getContextVariable(FrameUtils.CONFIG_CONTEXT_VARIABLE)).willReturn(config); + given(frame.getGasPrice()).willReturn(Wei.of(NETWORK_GAS_PRICE)); + if (success) { + given(frame.getState()).willReturn(MessageFrame.State.COMPLETED_SUCCESS); + given(frame.getLogs()).willReturn(List.of(BESU_LOG)); + given(frame.getOutputData()).willReturn(pbjToTuweniBytes(OUTPUT_DATA)); + } else { + given(frame.getState()).willReturn(MessageFrame.State.COMPLETED_FAILED); + } + given(frame.getRecipientAddress()).willReturn(receiver); + given(frame.getMessageFrameStack()).willReturn(messageFrameStack); + } + + private long expectedGasUsed(@NonNull final MessageFrame frame) { + var nominalUsage = GAS_LIMIT - frame.getRemainingGas(); + final var selfDestructRefund = gasCalculator.getSelfDestructRefundAmount() + * Math.min(frame.getSelfDestructs().size(), nominalUsage / gasCalculator.getMaxRefundQuotient()); + nominalUsage -= (selfDestructRefund + frame.getGasRefund()); + return Math.max(nominalUsage, GAS_LIMIT - GAS_LIMIT * HEDERA_MAX_REFUND_PERCENTAGE / 100); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameUtilsTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameUtilsTest.java new file mode 100644 index
000000000000..6013e9353c9b --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/utils/FrameUtilsTest.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.contract.impl.test.exec.utils; + +import com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils; +import com.hedera.node.app.service.contract.impl.utils.ConversionUtils; +import java.lang.reflect.InvocationTargetException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class FrameUtilsTest { + private static final Set<Class<?>> toBeTested = + new HashSet<>(Arrays.asList(FrameUtils.class, ConversionUtils.class)); + + @Test + void throwsInConstructor() { + for (final var clazz : toBeTested) { + assertFor(clazz); + } + } + + private static final String UNEXPECTED_THROW = "Unexpected `%s` was thrown in `%s` constructor!"; + private static final String NO_THROW = "No exception was thrown in `%s` constructor!"; + + private void assertFor(final Class<?> clazz) { + try { + final var constructor = clazz.getDeclaredConstructor(); + constructor.setAccessible(true); + + constructor.newInstance(); + } catch (final InvocationTargetException expected) { + final var cause = expected.getCause(); + Assertions.assertTrue( + cause instanceof UnsupportedOperationException, String.format(UNEXPECTED_THROW, cause, clazz)); + return; + } catch (final Exception e) { + Assertions.fail(String.format(UNEXPECTED_THROW, e, clazz)); + } + Assertions.fail(String.format(NO_THROW, clazz)); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v030/Version030FeatureFlagsTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v030/Version030FeatureFlagsTest.java index 5709a173baf4..afc2c6052f1c 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v030/Version030FeatureFlagsTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v030/Version030FeatureFlagsTest.java @@ -16,7 +16,7 @@ package com.hedera.node.app.service.contract.impl.test.exec.v030; -import static com.hedera.node.app.service.contract.impl.exec.TransactionProcessor.CONFIG_CONTEXT_VARIABLE; +import static com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils.CONFIG_CONTEXT_VARIABLE; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.BDDMockito.given; diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v034/Version034FeatureFlagsTest.java
b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v034/Version034FeatureFlagsTest.java index fb94c116007d..12b250dc002f 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v034/Version034FeatureFlagsTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/v034/Version034FeatureFlagsTest.java @@ -16,7 +16,7 @@ package com.hedera.node.app.service.contract.impl.test.exec.v034; -import static com.hedera.node.app.service.contract.impl.exec.TransactionProcessor.CONFIG_CONTEXT_VARIABLE; +import static com.hedera.node.app.service.contract.impl.exec.utils.FrameUtils.CONFIG_CONTEXT_VARIABLE; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.BDDMockito.given; diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/handlers/AdapterUtils.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/handlers/AdapterUtils.java index 799e372c7ddb..fd24c2d5bf31 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/handlers/AdapterUtils.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/handlers/AdapterUtils.java @@ -20,7 +20,6 @@ import static com.hedera.node.app.service.mono.context.BasicTransactionContext.EMPTY_KEY; import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; import static com.hedera.node.app.service.mono.utils.EntityNum.MISSING_NUM; -import static com.hedera.node.app.service.mono.utils.EntityNum.fromAccountId; import static com.hedera.node.app.service.mono.utils.MiscUtils.asKeyUnchecked; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.COMPLEX_KEY_ACCOUNT; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.COMPLEX_KEY_ACCOUNT_KT; @@ -70,13 +69,12 @@ import static com.hedera.test.factories.txns.SignedTxnFactory.TREASURY_PAYER; import static org.mockito.BDDMockito.given; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.AccountApprovalForAllAllowance; import com.hedera.hapi.node.state.token.AccountCryptoAllowance; import com.hedera.hapi.node.state.token.AccountFungibleTokenAllowance; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; -import com.hedera.node.app.service.mono.state.virtual.EntityNumVirtualKey; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.node.app.spi.state.ReadableKVState; @@ -117,18 +115,12 @@ public static ReadableStates mockStates(final Map keysT return mockStates; } - private static MapReadableKVState wellKnownAliasState() { - final Map wellKnownAliases = Map.ofEntries( - Map.entry(CURRENTLY_UNUSED_ALIAS, new EntityNumValue(MISSING_NUM.longValue())), - Map.entry( - NO_RECEIVER_SIG_ALIAS, - new EntityNumValue(fromAccountId(NO_RECEIVER_SIG).longValue())), - Map.entry( - RECEIVER_SIG_ALIAS, - new EntityNumValue(fromAccountId(RECEIVER_SIG).longValue())), - Map.entry( - FIRST_TOKEN_SENDER_LITERAL_ALIAS.toStringUtf8(), - new 
EntityNumValue(fromAccountId(FIRST_TOKEN_SENDER).longValue()))); + private static MapReadableKVState wellKnownAliasState() { + final Map wellKnownAliases = Map.ofEntries( + Map.entry(Bytes.wrap(CURRENTLY_UNUSED_ALIAS), toPbj(MISSING_NUM.toGrpcAccountId())), + Map.entry(Bytes.wrap(NO_RECEIVER_SIG_ALIAS), toPbj(NO_RECEIVER_SIG)), + Map.entry(Bytes.wrap(RECEIVER_SIG_ALIAS), toPbj(RECEIVER_SIG)), + Map.entry(Bytes.wrap(FIRST_TOKEN_SENDER_LITERAL_ALIAS.toByteArray()), toPbj(FIRST_TOKEN_SENDER))); return new MapReadableKVState<>(ALIASES_KEY, wellKnownAliases); } @@ -151,63 +143,63 @@ class SigReqAdapterUtils { .spenderNum(DEFAULT_PAYER.getAccountNum()) .build(); - static ReadableKVState wellKnownAccountsState() { + static ReadableKVState wellKnownAccountsState() { return new MapReadableKVState<>(ACCOUNTS_KEY, wellKnownAccountStoreAt()); } - static Map wellKnownAccountStoreAt() { - final var destination = new HashMap(); + public static Map wellKnownAccountStoreAt() { + final var destination = new HashMap(); destination.put( - EntityNumVirtualKey.fromLong(FIRST_TOKEN_SENDER.getAccountNum()), + toPbj(FIRST_TOKEN_SENDER), toPbjAccount(FIRST_TOKEN_SENDER.getAccountNum(), FIRST_TOKEN_SENDER_KT.asPbjKey(), 10_000L, false)); destination.put( - EntityNumVirtualKey.fromLong(SECOND_TOKEN_SENDER.getAccountNum()), + toPbj(SECOND_TOKEN_SENDER), toPbjAccount( SECOND_TOKEN_SENDER.getAccountNum(), SECOND_TOKEN_SENDER_KT.asPbjKey(), 10_000L, false)); destination.put( - EntityNumVirtualKey.fromLong(TOKEN_RECEIVER.getAccountNum()), + toPbj(TOKEN_RECEIVER), toPbjAccount(TOKEN_RECEIVER.getAccountNum(), TOKEN_WIPE_KT.asPbjKey(), 0L, false)); destination.put( - EntityNumVirtualKey.fromLong(DEFAULT_NODE.getAccountNum()), + toPbj(DEFAULT_NODE), toPbjAccount(DEFAULT_NODE.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), 0L, false)); destination.put( - EntityNumVirtualKey.fromLong(DEFAULT_PAYER.getAccountNum()), + toPbj(DEFAULT_PAYER), toPbjAccount( DEFAULT_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(STAKING_FUND.getAccountNum()), + toPbj(STAKING_FUND), toPbjAccount(STAKING_FUND.getAccountNum(), toPbj(asKeyUnchecked(EMPTY_KEY)), 0L, false)); destination.put( - EntityNumVirtualKey.fromLong(MASTER_PAYER.getAccountNum()), + toPbj(MASTER_PAYER), toPbjAccount( MASTER_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(TREASURY_PAYER.getAccountNum()), + toPbj(TREASURY_PAYER), toPbjAccount( TREASURY_PAYER.getAccountNum(), DEFAULT_PAYER_KT.asPbjKey(), DEFAULT_PAYER_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(NO_RECEIVER_SIG.getAccountNum()), + toPbj(NO_RECEIVER_SIG), toPbjAccount( NO_RECEIVER_SIG.getAccountNum(), NO_RECEIVER_SIG_KT.asPbjKey(), DEFAULT_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(RECEIVER_SIG.getAccountNum()), + toPbj(RECEIVER_SIG), toPbjAccount( RECEIVER_SIG.getAccountNum(), RECEIVER_SIG_KT.asPbjKey(), DEFAULT_BALANCE, true, false)); destination.put( - EntityNumVirtualKey.fromLong(SYS_ACCOUNT.getAccountNum()), + toPbj(SYS_ACCOUNT), toPbjAccount(SYS_ACCOUNT.getAccountNum(), SYS_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(MISC_ACCOUNT.getAccountNum()), + toPbj(MISC_ACCOUNT), toPbjAccount(MISC_ACCOUNT.getAccountNum(), MISC_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(CUSTOM_PAYER_ACCOUNT.getAccountNum()), + 
toPbj(CUSTOM_PAYER_ACCOUNT), toPbjAccount( CUSTOM_PAYER_ACCOUNT.getAccountNum(), CUSTOM_PAYER_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(OWNER_ACCOUNT.getAccountNum()), + toPbj(OWNER_ACCOUNT), toPbjAccount( OWNER_ACCOUNT.getAccountNum(), OWNER_ACCOUNT_KT.asPbjKey(), @@ -218,7 +210,7 @@ static Map wellKnownAccountStoreAt() { List.of(nftAllowances), false)); destination.put( - EntityNumVirtualKey.fromLong(DELEGATING_SPENDER.getAccountNum()), + toPbj(DELEGATING_SPENDER), toPbjAccount( DELEGATING_SPENDER.getAccountNum(), DELEGATING_SPENDER_KT.asPbjKey(), @@ -229,45 +221,52 @@ static Map wellKnownAccountStoreAt() { List.of(nftAllowances), false)); destination.put( - EntityNumVirtualKey.fromLong(COMPLEX_KEY_ACCOUNT.getAccountNum()), + toPbj(COMPLEX_KEY_ACCOUNT), toPbjAccount( COMPLEX_KEY_ACCOUNT.getAccountNum(), COMPLEX_KEY_ACCOUNT_KT.asPbjKey(), DEFAULT_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(TOKEN_TREASURY.getAccountNum()), + toPbj(TOKEN_TREASURY), toPbjAccount(TOKEN_TREASURY.getAccountNum(), TOKEN_TREASURY_KT.asPbjKey(), DEFAULT_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(DILIGENT_SIGNING_PAYER.getAccountNum()), + toPbj(DILIGENT_SIGNING_PAYER), toPbjAccount( DILIGENT_SIGNING_PAYER.getAccountNum(), DILIGENT_SIGNING_PAYER_KT.asPbjKey(), DEFAULT_BALANCE, false)); destination.put( - EntityNumVirtualKey.fromLong(FROM_OVERLAP_PAYER.getAccountNum()), + toPbj(FROM_OVERLAP_PAYER), toPbjAccount( FROM_OVERLAP_PAYER.getAccountNum(), FROM_OVERLAP_PAYER_KT.asPbjKey(), DEFAULT_BALANCE, - false)); + true)); destination.put( - EntityNumVirtualKey.fromLong(MISC_RECIEVER_SIG_CONTRACT.getContractNum()), + toPbj(asAccountFromNum(MISC_RECIEVER_SIG_CONTRACT)), toPbjAccount( MISC_RECIEVER_SIG_CONTRACT.getContractNum(), DILIGENT_SIGNING_PAYER_KT.asPbjKey(), DEFAULT_BALANCE, true)); destination.put( - EntityNumVirtualKey.fromLong(IMMUTABLE_CONTRACT.getContractNum()), + toPbj(asAccountFromNum(IMMUTABLE_CONTRACT)), toPbjAccount(IMMUTABLE_CONTRACT.getContractNum(), Key.DEFAULT, DEFAULT_BALANCE, true)); destination.put( - EntityNumVirtualKey.fromLong(MISC_CONTRACT.getContractNum()), + toPbj(asAccountFromNum(MISC_CONTRACT)), toPbjAccount(MISC_CONTRACT.getContractNum(), MISC_ADMIN_KT.asPbjKey(), DEFAULT_BALANCE, true)); return destination; } + public static com.hederahashgraph.api.proto.java.AccountID asAccountFromNum( + com.hederahashgraph.api.proto.java.ContractID id) { + return com.hederahashgraph.api.proto.java.AccountID.newBuilder() + .setAccountNum(id.getContractNum()) + .build(); + } + private static Account toPbjAccount(final long number, final Key key, long balance, boolean isSmartContract) { return toPbjAccount(number, key, balance, false, List.of(), List.of(), List.of(), isSmartContract); } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionProcessorTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionProcessorTest.java index fc5629fbef9c..46f5be5adbad 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionProcessorTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionProcessorTest.java @@ -22,12 +22,9 @@ import static org.mockito.Mockito.verify; 
import com.hedera.node.app.service.contract.impl.exec.TransactionProcessor; -import com.hedera.node.app.service.contract.impl.hevm.HederaEvmBlocks; -import com.hedera.node.app.service.contract.impl.hevm.HederaEvmTransactionProcessor; -import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater; +import com.hedera.node.app.service.contract.impl.hevm.*; import com.swirlds.config.api.Configuration; import java.util.Map; -import org.hyperledger.besu.evm.tracing.OperationTracer; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -36,6 +33,9 @@ @ExtendWith(MockitoExtension.class) class HederaEvmTransactionProcessorTest { + @Mock + private HederaEvmCode code; + @Mock private HederaEvmBlocks blocks; @@ -43,7 +43,7 @@ class HederaEvmTransactionProcessorTest { private HederaWorldUpdater worldUpdater; @Mock - private OperationTracer tracer; + private HederaTracer tracer; @Mock private Configuration config; @@ -70,7 +70,7 @@ void setUp() { @Test void calls030AsExpected() { final var transaction = wellKnownHapiCall(); - final var context = wellKnownContextWith(blocks, false); + final var context = wellKnownContextWith(code, blocks, false); subject.process(transaction, worldUpdater, context, VERSION_030, tracer, config); @@ -80,7 +80,7 @@ void calls030AsExpected() { @Test void calls034AsExpected() { final var transaction = wellKnownHapiCall(); - final var context = wellKnownContextWith(blocks, false); + final var context = wellKnownContextWith(code, blocks, false); subject.process(transaction, worldUpdater, context, VERSION_034, tracer, config); @@ -90,7 +90,7 @@ void calls034AsExpected() { @Test void calls038AsExpected() { final var transaction = wellKnownHapiCall(); - final var context = wellKnownContextWith(blocks, false); + final var context = wellKnownContextWith(code, blocks, false); subject.process(transaction, worldUpdater, context, VERSION_038, tracer, config); diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionTest.java index 0a96a99688b4..dceb0a5baf39 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/hevm/HederaEvmTransactionTest.java @@ -23,6 +23,12 @@ import org.junit.jupiter.api.Test; class HederaEvmTransactionTest { + @Test + void gasAvailableIsLimitMinusIntrinsic() { + final var subject = TestHelpers.wellKnownHapiCall(); + assertEquals(GAS_LIMIT - INTRINSIC_GAS, subject.gasAvailable(INTRINSIC_GAS)); + } + @Test void computesUpfrontCostWithoutOverflowConcern() { final var subject = TestHelpers.wellKnownHapiCall(); diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyEvmAccountTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyEvmAccountTest.java index 37148b54a19b..d7e9f9b91336 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyEvmAccountTest.java +++ 
b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyEvmAccountTest.java @@ -23,6 +23,7 @@ import static org.mockito.Mockito.verify; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.ContractID; import com.hedera.node.app.service.contract.impl.state.EvmFrameState; import com.hedera.node.app.service.contract.impl.state.ProxyEvmAccount; import com.hedera.node.app.service.contract.impl.utils.ConversionUtils; @@ -68,6 +69,11 @@ void hasExpectedId() { assertEquals(AccountID.newBuilder().accountNum(ACCOUNT_NUM).build(), subject.hederaId()); } + @Test + void hasExpectedContractId() { + assertEquals(ContractID.newBuilder().contractNum(ACCOUNT_NUM).build(), subject.hederaContractId()); + } + @Test void accountHashNotSupported() { assertThrows(UnsupportedOperationException.class, subject::getAddressHash); diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyWorldUpdaterTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyWorldUpdaterTest.java index b67d5a9756ed..fc4f585f8262 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyWorldUpdaterTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/ProxyWorldUpdaterTest.java @@ -17,6 +17,7 @@ package com.hedera.node.app.service.contract.impl.test.state; import static com.hedera.node.app.service.contract.impl.exec.failure.CustomExceptionalHaltReason.INVALID_RECEIVER_SIGNATURE; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.CALLED_CONTRACT_ID; import static com.hedera.node.app.service.contract.impl.test.TestHelpers.EIP_1014_ADDRESS; import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.*; import static org.hyperledger.besu.datatypes.Address.ALTBN128_ADD; @@ -56,6 +57,8 @@ class ProxyWorldUpdaterTest { private static final Address LONG_ZERO_ADDRESS = asLongZeroAddress(NUMBER); private static final Address NEXT_LONG_ZERO_ADDRESS = asLongZeroAddress(NEXT_NUMBER); private static final Address SOME_EVM_ADDRESS = Address.fromHexString("0x1234123412341234123412341234123412341234"); + private static final Address OTHER_EVM_ADDRESS = + Address.fromHexString("0x1239123912391239123912391239123912391239"); @Mock private Account anImmutableAccount; @@ -233,12 +236,12 @@ void usesHapiPayerIfRecipientIsZeroAddress() { } @Test - void usesAliasIfCreate2IsSetupRecipientIsZeroAddress() { + void usesAliasIfCreate2IsSetupRecipient() { givenDispatch(); givenMatchingEntityNumbers(); given(evmFrameState.getMutableAccount(SOME_EVM_ADDRESS)).willReturn(mutableAccount); - subject.setupCreate2(ALTBN128_ADD, SOME_EVM_ADDRESS); + subject.setupAliasedCreate(ALTBN128_ADD, SOME_EVM_ADDRESS); subject.createAccount(SOME_EVM_ADDRESS, 1, Wei.ZERO); verify(dispatch) @@ -246,6 +249,35 @@ void usesAliasIfCreate2IsSetupRecipientIsZeroAddress() { NEXT_NUMBER, ALTBN128_ADD.toBigInteger().longValueExact(), 1, aliasFrom(SOME_EVM_ADDRESS)); } + @Test + void canResolvePendingCreationHederaId() { + givenDispatch(); + given(dispatch.peekNextEntityNumber()).willReturn(NEXT_NUMBER); + + subject.setupAliasedCreate(ALTBN128_ADD, SOME_EVM_ADDRESS); + + final var contractId = subject.getHederaContractId(SOME_EVM_ADDRESS); + 
assertEquals(ContractID.newBuilder().contractNum(NEXT_NUMBER).build(), contractId); + } + + @Test + void throwsIseWithoutCorrespondingAccount() { + givenDispatch(); + given(dispatch.peekNextEntityNumber()).willReturn(NEXT_NUMBER); + + subject.setupAliasedCreate(ALTBN128_ADD, SOME_EVM_ADDRESS); + + assertThrows(IllegalArgumentException.class, () -> subject.getHederaContractId(OTHER_EVM_ADDRESS)); + } + + @Test + void getsAvailableContractIdByAddress() { + given(evmFrameState.getAccount(SOME_EVM_ADDRESS)).willReturn(proxyEvmAccount); + given(proxyEvmAccount.hederaContractId()).willReturn(CALLED_CONTRACT_ID); + final var actual = subject.getHederaContractId(SOME_EVM_ADDRESS); + assertEquals(CALLED_CONTRACT_ID, actual); + } + @Test void cannotSetupWithMissingParentNumber() { givenDispatch(); diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/TokenEvmAccountTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/TokenEvmAccountTest.java index 6ae5bba3aa82..c894b7829652 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/TokenEvmAccountTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/state/TokenEvmAccountTest.java @@ -20,8 +20,10 @@ import static org.junit.jupiter.api.Assertions.*; import static org.mockito.BDDMockito.given; +import com.hedera.hapi.node.base.ContractID; import com.hedera.node.app.service.contract.impl.state.EvmFrameState; import com.hedera.node.app.service.contract.impl.state.TokenEvmAccount; +import com.hedera.node.app.service.contract.impl.utils.ConversionUtils; import com.hedera.pbj.runtime.io.buffer.Bytes; import org.apache.tuweni.bytes.Bytes32; import org.apache.tuweni.units.bigints.UInt256; @@ -61,6 +63,12 @@ void doesntSupportGettingId() { assertThrows(IllegalStateException.class, subject::hederaId); } + @Test + void doesSupportGettingContractId() { + final var tokenNum = ConversionUtils.numberOfLongZero(TOKEN_ADDRESS); + assertEquals(ContractID.newBuilder().contractNum(tokenNum).build(), subject.hederaContractId()); + } + @Test void usesGivenAddress() { assertSame(TOKEN_ADDRESS, subject.getAddress()); diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/utils/ConversionUtilsTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/utils/ConversionUtilsTest.java index 88100092ab27..922da0fcdf3f 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/utils/ConversionUtilsTest.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/utils/ConversionUtilsTest.java @@ -16,14 +16,23 @@ package com.hedera.node.app.service.contract.impl.test.utils; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.*; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.*; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.BDDMockito.given; +import com.hedera.hapi.node.base.ContractID; +import com.hedera.hapi.node.contract.ContractLoginfo; import com.hedera.hapi.node.state.common.EntityNumber; import 
com.hedera.node.app.service.contract.impl.utils.ConversionUtils; import com.hedera.node.app.spi.meta.bni.Dispatch; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; +import org.apache.tuweni.bytes.Bytes; import org.hyperledger.besu.datatypes.Address; +import org.hyperledger.besu.evm.log.Log; +import org.hyperledger.besu.evm.log.LogsBloomFilter; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -72,4 +81,38 @@ void returnsGivenIfPresentAlias() { final var actual = ConversionUtils.maybeMissingNumberOf(address, dispatch); assertEquals(0x1234L, actual); } + + @Test + void convertsFromBesuLogAsExpected() { + final var expectedBloom = Bytes.wrap(bloomFor(BESU_LOG)); + final var expected = ContractLoginfo.newBuilder() + .contractID(ContractID.newBuilder().contractNum(numberOfLongZero(NON_SYSTEM_LONG_ZERO_ADDRESS))) + .bloom(tuweniToPbjBytes(expectedBloom)) + .data(CALL_DATA) + .topic(List.of(TOPIC)) + .build(); + + final var actual = pbjLogFrom(BESU_LOG); + + assertEquals(expected, actual); + } + + @Test + void convertsFromBesuLogsAsExpected() { + final var expectedBloom = Bytes.wrap(bloomFor(BESU_LOG)); + final var expected = ContractLoginfo.newBuilder() + .contractID(ContractID.newBuilder().contractNum(numberOfLongZero(NON_SYSTEM_LONG_ZERO_ADDRESS))) + .bloom(tuweniToPbjBytes(expectedBloom)) + .data(CALL_DATA) + .topic(List.of(TOPIC)) + .build(); + + final var actual = pbjLogsFrom(List.of(BESU_LOG)); + + assertEquals(List.of(expected), actual); + } + + private byte[] bloomFor(@NonNull final Log log) { + return LogsBloomFilter.builder().insertLog(log).build().toArray(); + } } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/module-info.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/module-info.java index f5f641ce0bdb..711cf4756a93 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/test/java/module-info.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/module-info.java @@ -33,6 +33,8 @@ org.junit.platform.commons; opens com.hedera.node.app.service.contract.impl.test.exec.gas to org.junit.platform.commons; + opens com.hedera.node.app.service.contract.impl.test.exec.utils to + org.junit.platform.commons; opens com.hedera.node.app.service.contract.impl.test.exec.v030 to org.junit.platform.commons; opens com.hedera.node.app.service.contract.impl.test.exec.v034 to diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java index c63e24e42416..9714ccdbb060 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java @@ -26,7 +26,6 @@ import com.hedera.hapi.node.state.token.Account; import com.hedera.node.app.service.evm.contracts.execution.StaticProperties; import com.hedera.node.app.service.mono.ledger.accounts.AliasManager; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.state.ReadableKVState; import com.hedera.node.app.spi.state.ReadableStates; @@ -51,7 +50,7 @@ public class ReadableAccountStoreImpl implements ReadableAccountStore 
{ /** The underlying data storage class that holds the account data. */ private final ReadableKVState accountState; /** The underlying data storage class that holds the aliases data built from the state. */ - private final ReadableKVState aliases; + private final ReadableKVState aliases; /** * Create a new {@link ReadableAccountStoreImpl} instance. @@ -83,12 +82,8 @@ public Account getAccountById(@NonNull final AccountID accountID) { @Override @Nullable - public AccountID getAccountIDByAlias(@NonNull final String alias) { - final var entityNum = aliases.get(alias); - if (entityNum == null) { - return null; - } - return AccountID.newBuilder().accountNum(entityNum.num()).build(); + public AccountID getAccountIDByAlias(@NonNull final Bytes alias) { + return aliases.get(alias); } /* Helper methods */ @@ -112,11 +107,11 @@ protected Account getAccountLeaf(@NonNull final AccountID id) { if (isOfEvmAddressSize(alias) && isMirror(alias)) { yield fromMirror(alias); } else { - final var entityNum = aliases.get(alias.asUtf8String()); - yield entityNum == null ? EntityNumValue.DEFAULT.num() : entityNum.num(); + final var entityNum = aliases.get(alias); + yield entityNum == null ? 0L : entityNum.accountNum(); } } - case UNSET -> EntityNumValue.DEFAULT.num(); + case UNSET -> 0L; }; return accountNum == null @@ -147,7 +142,7 @@ private Account getContractLeaf(@NonNull final ContractID id) { } // The evm address is some kind of alias. - var entityNum = aliases.get(evmAddress.asUtf8String()); + var entityNum = aliases.get(evmAddress); // If we didn't find an alias, we will want to auto-create this account. But // we don't want to auto-create an account if there is already another @@ -157,13 +152,12 @@ private Account getContractLeaf(@NonNull final ContractID id) { // address from it and look it up final var evmKeyAliasAddress = keyAliasToEVMAddress(evmAddress); if (evmKeyAliasAddress != null) { - entityNum = aliases.get( - ByteString.copyFrom(evmKeyAliasAddress).toStringUtf8()); + entityNum = aliases.get(Bytes.wrap(evmKeyAliasAddress)); } } - yield entityNum == null ? EntityNumValue.DEFAULT.num() : entityNum.num(); + yield entityNum == null ? 0L : entityNum.accountNum(); } - case UNSET -> EntityNumValue.DEFAULT.num(); + case UNSET -> 0L; }; return contractNum == null diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java index 60ac22da5149..33074459b9bf 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java @@ -20,8 +20,8 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.TokenRelation; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.spi.state.ReadableKVState; import com.hedera.node.app.spi.state.ReadableStates; @@ -35,7 +35,7 @@ */ public class ReadableTokenRelationStoreImpl implements ReadableTokenRelationStore { /** The underlying data storage class that holds the token data. 
*/ - private final ReadableKVState readableTokenRelState; + private final ReadableKVState readableTokenRelState; /** * Create a new {@link ReadableTokenRelationStoreImpl} instance. @@ -57,7 +57,8 @@ public TokenRelation get(@NonNull final AccountID accountId, @NonNull final Toke if (AccountID.DEFAULT.equals(accountId) || TokenID.DEFAULT.equals(tokenId)) return null; - return readableTokenRelState.get(EntityNumPair.fromLongs(accountId.accountNum(), tokenId.tokenNum())); + return readableTokenRelState.get( + EntityIDPair.newBuilder().accountId(accountId).tokenId(tokenId).build()); } /** diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenStoreImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenStoreImpl.java index b15a6fa444b9..b9c3ae2bf42a 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenStoreImpl.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenStoreImpl.java @@ -22,7 +22,6 @@ import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.transaction.CustomFee; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.spi.state.ReadableKVState; import com.hedera.node.app.spi.state.ReadableStates; @@ -35,7 +34,7 @@ */ public class ReadableTokenStoreImpl implements ReadableTokenStore { /** The underlying data storage class that holds the token data. */ - private final ReadableKVState tokenState; + private final ReadableKVState tokenState; /** * Create a new {@link ReadableTokenStoreImpl} instance. @@ -52,7 +51,7 @@ public ReadableTokenStoreImpl(@NonNull final ReadableStates states) { @Nullable public TokenMetadata getTokenMeta(@NonNull final TokenID id) { requireNonNull(id); - final var token = getTokenLeaf(id.tokenNum()); + final var token = getTokenLeaf(id); if (token.isEmpty()) { return null; } @@ -63,7 +62,7 @@ public TokenMetadata getTokenMeta(@NonNull final TokenID id) { @Nullable public Token get(@NonNull final TokenID id) { requireNonNull(id); - return getTokenLeaf(id.tokenNum()).orElse(null); + return getTokenLeaf(id).orElse(null); } private TokenMetadata tokenMetaFrom(final Token token) { @@ -87,7 +86,7 @@ private TokenMetadata tokenMetaFrom(final Token token) { token.pauseKeyOrElse(null), token.symbol(), hasRoyaltyWithFallback, - token.treasuryAccountNumber(), // remove this and make it a long + token.treasuryAccountId(), token.decimals()); } @@ -99,11 +98,11 @@ private boolean isRoyaltyWithFallback(final CustomFee fee) { * Returns the merkleToken leaf for the given tokenId. 
If the token doesn't exist returns {@code * Optional.empty()} * - * @param tokenNum given tokenId's number + * @param tokenId given tokenId * @return merkleToken leaf for the given tokenId */ - private Optional getTokenLeaf(final long tokenNum) { - final var token = tokenState.get(EntityNum.fromLong(tokenNum)); + private Optional getTokenLeaf(final TokenID tokenId) { + final var token = tokenState.get(tokenId); return Optional.ofNullable(token); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java index 220a6c63ee98..76740170d24f 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java @@ -41,6 +41,7 @@ import com.hedera.node.app.spi.state.SchemaRegistry; import com.hedera.node.app.spi.state.StateDefinition; import com.hedera.node.config.data.BootstrapConfig; +import com.hedera.node.config.data.LedgerConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Set; @@ -85,6 +86,7 @@ public void migrate(@NonNull MigrationContext ctx) { // TBD Verify this is correct. We need to preload all the special accounts final var accounts = ctx.newStates().get(ACCOUNTS_KEY); final var bootstrapConfig = ctx.configuration().getConfigData(BootstrapConfig.class); + final var ledgerConfig = ctx.configuration().getConfigData(LedgerConfig.class); final var superUserKeyBytes = bootstrapConfig.genesisPublicKey(); if (superUserKeyBytes.length() != 32) { throw new IllegalStateException("'" + superUserKeyBytes + "' is not a possible Ed25519 public key"); @@ -92,11 +94,21 @@ public void migrate(@NonNull MigrationContext ctx) { final var superUserKey = Key.newBuilder().ed25519(superUserKeyBytes).build(); + long remainingBalance = ledgerConfig.totalTinyBarFloat(); try { accounts.put( AccountID.newBuilder().accountNum(2).build(), Account.newBuilder() .accountNumber(2) + .tinybarBalance(remainingBalance - 100_000_000_000L) + .key(superUserKey) + .declineReward(true) + .build()); + accounts.put( + AccountID.newBuilder().accountNum(3).build(), + Account.newBuilder() + .accountNumber(3) + .tinybarBalance(100_000_000_000L) .key(superUserKey) .declineReward(true) .build()); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableAccountStore.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableAccountStore.java index dd17ef8e2f6e..8b79cba123e5 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableAccountStore.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableAccountStore.java @@ -21,7 +21,6 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.state.token.Account; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; import com.hedera.node.app.spi.state.WritableKVState; import com.hedera.node.app.spi.state.WritableStates; import com.hedera.pbj.runtime.io.buffer.Bytes; @@ -110,10 +109,10 @@ public Account getForModify(@NonNull final AccountID id) { yield fromMirror(alias); } else { final var accountID = aliases.get(alias.asUtf8String()); - yield accountID == null ? 
AccountID.DEFAULT.accountNum() : accountID.accountNum(); + yield accountID == null ? 0L : accountID.accountNum(); } } - case UNSET -> EntityNumValue.DEFAULT.num(); + case UNSET -> 0L; }; return accountNum == null diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java index 9c6b9f440178..36551c6fb3ee 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java @@ -80,7 +80,7 @@ public Nft getForModify(final UniqueTokenId id) { public Nft getForModify(final TokenID tokenId, final long serialNumber) { requireNonNull(tokenId); return nftState.getForModify(UniqueTokenId.newBuilder() - .tokenTypeNumber(tokenId.tokenNum()) + .tokenId(tokenId) .serialNumber(serialNumber) .build()); } @@ -119,7 +119,7 @@ public void remove(final @NonNull UniqueTokenId serialNum) { */ public void remove(final @NonNull TokenID tokenId, final long serialNum) { remove(UniqueTokenId.newBuilder() - .tokenTypeNumber(tokenId.tokenNum()) + .tokenId(tokenId) .serialNumber(serialNum) .build()); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java index 8312e3eff1f8..40f269d07a33 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java @@ -20,8 +20,8 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.TokenRelation; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.spi.state.WritableKVState; import com.hedera.node.app.spi.state.WritableStates; import edu.umd.cs.findbugs.annotations.NonNull; @@ -38,7 +38,7 @@ */ public class WritableTokenRelationStore extends ReadableTokenRelationStoreImpl { /** The underlying data storage class that holds the token data. */ - private final WritableKVState tokenRelState; + private final WritableKVState tokenRelState; /** * Create a new {@link WritableTokenRelationStore} instance. 
@@ -57,7 +57,10 @@ public WritableTokenRelationStore(@NonNull final WritableStates states) { */ public void put(@NonNull final TokenRelation tokenRelation) { tokenRelState.put( - EntityNumPair.fromLongs(tokenRelation.accountNumber(), tokenRelation.tokenNumber()), + EntityIDPair.newBuilder() + .accountId(tokenRelation.accountId()) + .tokenId(tokenRelation.tokenId()) + .build(), Objects.requireNonNull(tokenRelation)); } @@ -67,7 +70,10 @@ public void put(@NonNull final TokenRelation tokenRelation) { * @param tokenRelation the {@code TokenRelation} to be removed */ public void remove(@NonNull final TokenRelation tokenRelation) { - tokenRelState.remove(EntityNumPair.fromLongs(tokenRelation.accountNumber(), tokenRelation.tokenNumber())); + tokenRelState.remove(EntityIDPair.newBuilder() + .accountId(tokenRelation.accountId()) + .tokenId(tokenRelation.tokenId()) + .build()); } /** @@ -84,13 +90,14 @@ public TokenRelation getForModify(@NonNull final AccountID accountId, @NonNull f if (AccountID.DEFAULT.equals(accountId) || TokenID.DEFAULT.equals(tokenId)) return null; - return tokenRelState.getForModify(EntityNumPair.fromLongs(accountId.accountNum(), tokenId.tokenNum())); + return tokenRelState.getForModify( + EntityIDPair.newBuilder().accountId(accountId).tokenId(tokenId).build()); } /** * @return the set of token relations modified in existing state */ - public Set modifiedTokens() { + public Set modifiedTokens() { return tokenRelState.modifiedKeys(); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenStore.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenStore.java index d7ac3feb768d..8ea541ca6bed 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenStore.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenStore.java @@ -18,9 +18,9 @@ import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.state.token.Token; import com.hedera.node.app.service.mono.state.merkle.MerkleToken; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.spi.state.WritableKVState; import com.hedera.node.app.spi.state.WritableStates; import edu.umd.cs.findbugs.annotations.NonNull; @@ -37,7 +37,7 @@ */ public class WritableTokenStore extends ReadableTokenStoreImpl { /** The underlying data storage class that holds the token data. */ - private final WritableKVState tokenState; + private final WritableKVState tokenState; /** * Create a new {@link WritableTokenStore} instance. @@ -57,18 +57,18 @@ public WritableTokenStore(@NonNull final WritableStates states) { */ public void put(@NonNull final Token token) { Objects.requireNonNull(token); - tokenState.put(EntityNum.fromLong(token.tokenNumber()), Objects.requireNonNull(token)); + tokenState.put(token.tokenId(), Objects.requireNonNull(token)); } /** * Returns the {@link Token} with the given number using {@link WritableKVState#getForModify}. * If no such token exists, returns {@code Optional.empty()} - * @param tokenNum - the number of the token to be retrieved. + * @param tokenId - the id of the token to be retrieved. 
*/ @NonNull - public Optional getForModify(final long tokenNum) { - requireNonNull(tokenNum); - final var token = tokenState.getForModify(EntityNum.fromLong(tokenNum)); + public Optional getForModify(final TokenID tokenId) { + requireNonNull(tokenId); + final var token = tokenState.getForModify(tokenId); return Optional.ofNullable(token); } @@ -85,7 +85,7 @@ public long sizeOfState() { * @return the set of tokens modified in existing state */ @NonNull - public Set modifiedTokens() { + public Set modifiedTokens() { return tokenState.modifiedKeys(); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java index a32dd5b99bec..805f514c4f7b 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java @@ -114,8 +114,8 @@ protected void changeSupply( requireNonNull(invalidSupplyCode); validateTrue( - treasuryRel.accountNumber() == token.treasuryAccountNumber() - && token.tokenNumber() == treasuryRel.tokenNumber(), + treasuryRel.accountId().equals(token.treasuryAccountId()) + && token.tokenId().equals(treasuryRel.tokenId()), FAIL_INVALID); final long newTotalSupply = token.totalSupply() + amount; @@ -128,7 +128,7 @@ protected void changeSupply( validateTrue(token.maxSupply() >= newTotalSupply, TOKEN_MAX_SUPPLY_REACHED); } - final var treasuryAccount = accountStore.get(asAccount(treasuryRel.accountNumber())); + final var treasuryAccount = accountStore.get(treasuryRel.accountId()); validateTrue(treasuryAccount != null, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); final long newTreasuryBalance = treasuryRel.balance() + amount; @@ -184,7 +184,7 @@ protected void createAndLinkTokenRels( final var firstOfNewTokenRels = newTokenRels.get(0); final var updatedAcct = account.copyBuilder() // replace the head token number with the first token number of the new tokenRels - .headTokenNumber(firstOfNewTokenRels.tokenNumber()) + .headTokenNumber(firstOfNewTokenRels.tokenId().tokenNum()) // and also update the account's total number of token associations .numberAssociations(account.numberAssociations() + newTokenRels.size()) .build(); @@ -225,13 +225,13 @@ private void linkTokenRels( final var lastOfNewTokenRels = newTokenRels.remove(newTokenRels.size() - 1); final var headTokenAsNonHeadTokenRel = headTokenRel .copyBuilder() - .previousToken(lastOfNewTokenRels.tokenNumber()) + .previousToken(lastOfNewTokenRels.tokenId()) .build(); // the old head token rel is no longer the head // Also connect the last of the new tokenRels to the old head token rel newTokenRels.add(lastOfNewTokenRels .copyBuilder() - .nextToken(headTokenAsNonHeadTokenRel.tokenNumber()) + .nextToken(headTokenAsNonHeadTokenRel.tokenId()) .build()); tokenRelStore.put(headTokenAsNonHeadTokenRel); } else { @@ -259,28 +259,26 @@ private List createTokenRelsToAccount( // Link each of the new token IDs together in a doubly-linked list way by setting each // token relation's previous and next token IDs. - // Compute the previous and next token IDs. 
Unfortunately `TokenRelation` doesn't - // allow for null values, so a value of '0' will have to indicate a null pointer to - // the previous or next token (since no token number 0 can exist) - long prevTokenId = 0; - long nextTokenId = 0; + // Compute the previous and next token IDs. + TokenID prevTokenId = null; + TokenID nextTokenId = null; if (i - 1 >= 0) { // if there is a previous token prevTokenId = Optional.ofNullable(tokens.get(i - 1)) - .map(Token::tokenNumber) - .orElse(0L); + .map(Token::tokenId) + .orElse(null); } if (i + 1 < tokens.size()) { // if there is a next token nextTokenId = Optional.ofNullable(tokens.get(i + 1)) - .map(Token::tokenNumber) - .orElse(0L); + .map(Token::tokenId) + .orElse(null); } // Create the new token relation final var isFrozen = token.hasFreezeKey() && token.accountsFrozenByDefault(); final var kycGranted = !token.hasKycKey(); final var newTokenRel = new TokenRelation( - token.tokenNumber(), - account.accountNumber(), + token.tokenId(), + asAccount(account.accountNumber()), 0, isFrozen, kycGranted, @@ -312,7 +310,7 @@ protected void autoAssociate( final var entitiesConfig = context.configuration().getConfigData(EntitiesConfig.class); final var accountId = asAccount(account.accountNumber()); - final var tokenId = asToken(token.tokenNumber()); + final var tokenId = token.tokenId(); // If token is already associated, no need to associate again validateTrue(tokenRelStore.get(accountId, tokenId) == null, TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT); validateTrue( @@ -331,13 +329,13 @@ protected void autoAssociate( // Create new token relation and commit to store final var newTokenRel = TokenRelation.newBuilder() - .tokenNumber(tokenId.tokenNum()) - .accountNumber(account.accountNumber()) + .tokenId(tokenId) + .accountId(accountId) .automaticAssociation(true) .kycGranted(!token.hasKycKey()) .frozen(token.hasFreezeKey() && token.accountsFrozenByDefault()) - .previousToken(-1) - .nextToken(account.headTokenNumber()) + .previousToken((TokenID) null) + .nextToken(asToken(account.headTokenNumber())) .build(); final var copyAccount = account.copyBuilder() diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoCreateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoCreateHandler.java index 55bcb16bf540..052e06c14fe9 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoCreateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoCreateHandler.java @@ -40,7 +40,7 @@ import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; import com.hedera.node.app.service.token.impl.validators.CryptoCreateValidator; import com.hedera.node.app.service.token.impl.validators.StakingValidator; -import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; @@ -64,18 +64,18 @@ public class CryptoCreateHandler extends BaseCryptoHandler implements Transactio private final CryptoCreateValidator cryptoCreateValidator; private StakingValidator stakingValidator; - private NodeInfo nodeInfo; + private NetworkInfo networkInfo; @Inject public CryptoCreateHandler( @NonNull final CryptoCreateValidator 
cryptoCreateValidator, @NonNull final StakingValidator stakingValidator, - @NonNull final NodeInfo nodeInfo) { + @NonNull final NetworkInfo networkInfo) { this.cryptoCreateValidator = requireNonNull(cryptoCreateValidator, "The supplied argument 'cryptoCreateValidator' must not be null"); this.stakingValidator = requireNonNull(stakingValidator, "The supplied argument 'stakingValidator' must not be null"); - this.nodeInfo = requireNonNull(nodeInfo, "The supplied argument 'nodeInfo' must not be null"); + this.networkInfo = requireNonNull(networkInfo, "The supplied argument 'networkInfo' must not be null"); } @Override @@ -211,7 +211,7 @@ private Account validateSemantics( op.stakedNodeId(), accountStore, context, - nodeInfo); + networkInfo); return payer; } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java index d2d35b4fd4b3..a38b3cf73f26 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java @@ -21,6 +21,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_CONTRACT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; import static java.util.Objects.requireNonNull; @@ -131,29 +132,27 @@ private List<TokenBalance> getTokenBalances( @NonNull final ReadableTokenStore readableTokenStore, @NonNull final ReadableTokenRelationStore tokenRelationStore) { final var ret = new ArrayList<TokenBalance>(); - var tokenNum = account.headTokenNumber(); + var tokenId = asToken(account.headTokenNumber()); int count = 0; TokenRelation tokenRelation; Token token; // token from readableToken store by tokenID - TokenID tokenID; // build from tokenNum AccountID accountID; // build from accountNumber TokenBalance tokenBalance; // created TokenBalance object - while (tokenNum != 0 && count < tokenConfig.maxRelsPerInfoQuery()) { + while (tokenId != null && !tokenId.equals(TokenID.DEFAULT) && count < tokenConfig.maxRelsPerInfoQuery()) { accountID = AccountID.newBuilder().accountNum(account.accountNumber()).build(); - tokenID = TokenID.newBuilder().tokenNum(tokenNum).build(); - tokenRelation = tokenRelationStore.get(accountID, tokenID); + tokenRelation = tokenRelationStore.get(accountID, tokenId); if (tokenRelation != null) { - token = readableTokenStore.get(tokenID); + token = readableTokenStore.get(tokenId); if (token != null) { tokenBalance = TokenBalance.newBuilder() - .tokenId(TokenID.newBuilder().tokenNum(tokenNum).build()) + .tokenId(tokenId) .balance(tokenRelation.balance()) .decimals(token.decimals()) .build(); ret.add(tokenBalance); } - tokenNum = tokenRelation.nextToken(); + tokenId = tokenRelation.nextToken(); } else { break; } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java
b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java index 8bac82c58739..b37814160b42 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java @@ -27,6 +27,7 @@ import static com.hedera.hapi.node.base.TokenKycStatus.KYC_NOT_APPLICABLE; import static com.hedera.node.app.hapi.utils.CommonUtils.asEvmAddress; import static com.hedera.node.app.service.evm.accounts.HederaEvmContractAliases.EVM_ADDRESS_LEN; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.node.app.spi.key.KeyUtils.ECDSA_SECP256K1_COMPRESSED_KEY_LENGTH; import static com.hedera.node.app.spi.key.KeyUtils.isEmpty; import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; @@ -239,23 +240,21 @@ private List getTokenRelationship( requireNonNull(readableTokenStore); requireNonNull(tokenRelationStore); final var ret = new ArrayList(); - var tokenNum = account.headTokenNumber(); + var tokenId = asToken(account.headTokenNumber()); int count = 0; TokenRelation tokenRelation; Token token; // token from readableToken store by tokenID - TokenID tokenID; // build from tokenNum AccountID accountID; // build from accountNumber - while (tokenNum != 0 && count < tokenConfig.maxRelsPerInfoQuery()) { + while (tokenId != null && !tokenId.equals(TokenID.DEFAULT) && count < tokenConfig.maxRelsPerInfoQuery()) { accountID = AccountID.newBuilder().accountNum(account.accountNumber()).build(); - tokenID = TokenID.newBuilder().tokenNum(tokenNum).build(); - tokenRelation = tokenRelationStore.get(accountID, tokenID); + tokenRelation = tokenRelationStore.get(accountID, tokenId); if (tokenRelation != null) { - token = readableTokenStore.get(tokenID); + token = readableTokenStore.get(tokenId); if (token != null) { - addTokenRelation(ret, token, tokenRelation, tokenNum); + addTokenRelation(ret, token, tokenRelation, tokenId); } - tokenNum = tokenRelation.nextToken(); + tokenId = tokenRelation.nextToken(); } else { break; } @@ -269,12 +268,12 @@ private List getTokenRelationship( * @param ret ArrayList of TokenRelationship object * @param token token from readableToken store by tokenID * @param tokenRelation token relation from token relation store - * @param tokenNum token number + * @param tokenId token id */ private void addTokenRelation( - ArrayList ret, Token token, TokenRelation tokenRelation, long tokenNum) { + ArrayList ret, Token token, TokenRelation tokenRelation, TokenID tokenId) { final var tokenRelationship = TokenRelationship.newBuilder() - .tokenId(TokenID.newBuilder().tokenNum(tokenNum).build()) + .tokenId(tokenId) .symbol(token.symbol()) .balance(tokenRelation.balance()) .decimals(token.decimals()) diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java index 9e9e4981e8d7..52dbc4b31e83 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java +++ 
b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java @@ -19,11 +19,15 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_IS_IMMUTABLE; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSFER_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static com.hedera.node.app.spi.key.KeyUtils.isEmpty; import static com.hedera.node.app.spi.key.KeyUtils.isValid; import static com.hedera.node.app.spi.validation.Validations.validateAccountID; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; @@ -34,15 +38,26 @@ import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.base.TransferList; import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.ReadableTokenStore.TokenMetadata; +import com.hedera.node.app.service.token.impl.handlers.transfer.EnsureAliasesStep; +import com.hedera.node.app.service.token.impl.handlers.transfer.ReplaceAliasesWithIDsInOp; +import com.hedera.node.app.service.token.impl.handlers.transfer.TransferContextImpl; +import com.hedera.node.app.service.token.impl.handlers.transfer.TransferStep; +import com.hedera.node.app.service.token.impl.validators.CryptoTransferValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.data.LazyCreationConfig; +import com.hedera.node.config.data.LedgerConfig; +import com.hedera.node.config.data.TokensConfig; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; @@ -53,14 +68,18 @@ */ @Singleton public class CryptoTransferHandler implements TransactionHandler { + private final CryptoTransferValidator validator; + @Inject - public CryptoTransferHandler() { - // Exists for injection + public CryptoTransferHandler(@NonNull final CryptoTransferValidator validator) { + this.validator = validator; } @Override public void preHandle(@NonNull final PreHandleContext context) throws PreCheckException { requireNonNull(context); + pureChecks(context.body()); + final var op = context.body().cryptoTransferOrThrow(); final var accountStore = context.createStore(ReadableAccountStore.class); final var tokenStore = context.createStore(ReadableTokenStore.class); @@ -75,9 +94,105 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx checkFungibleTokenTransfers(hbarTransfers, context, accountStore, 
true); } + @Override + public void pureChecks(@NonNull final TransactionBody txn) throws PreCheckException { + requireNonNull(txn); + final var op = txn.cryptoTransfer(); + validateTruePreCheck(op != null, INVALID_TRANSACTION_BODY); + validator.pureChecks(op); + } + @Override public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + requireNonNull(context); + final var txn = context.body(); + final var op = txn.cryptoTransferOrThrow(); + + final var ledgerConfig = context.configuration().getConfigData(LedgerConfig.class); + final var hederaConfig = context.configuration().getConfigData(HederaConfig.class); + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + + validator.validateSemantics(op, ledgerConfig, hederaConfig, tokensConfig); + + // create a new transfer context that is specific only for this transaction + final var transferContext = new TransferContextImpl(context); + + // Replace all aliases in the transaction body with its account ids + final var replacedOp = ensureAndReplaceAliasesInOp(txn, transferContext, context); + // Use the op with replaced aliases in further steps + final var steps = decomposeIntoSteps(replacedOp); + for (final var step : steps) { + // Apply all changes to the handleContext's States + step.doIn(transferContext); + } + } + + /** + * Ensures all aliases specified in the transfer exist. If the aliases are in receiver section, and don't exist + * they will be auto-created. This step populates resolved aliases and number of auto creations in the + * transferContext, which is used by subsequent steps and throttling. + * It will also replace all aliases in the {@link CryptoTransferTransactionBody} with its account ids, so it will + * be easier to process in next steps. + * @param txn the given transaction body + * @param transferContext the given transfer context + * @param context the given handle context + * @return the replaced transaction body with all aliases replaced with its account ids + * @throws HandleException if any error occurs during the process + */ + private CryptoTransferTransactionBody ensureAndReplaceAliasesInOp( + final TransactionBody txn, final TransferContextImpl transferContext, final HandleContext context) + throws HandleException { + final var op = txn.cryptoTransferOrThrow(); + + // ensure all aliases exist, if not create then if receivers + ensureExistenceOfAliasesOrCreate(op, transferContext); + if (transferContext.numOfLazyCreations() > 0) { + final var config = context.configuration().getConfigData(LazyCreationConfig.class); + validateTrue(config.enabled(), NOT_SUPPORTED); + } + + // replace all aliases with its account ids, so it will be easier to process in next steps + final var replacedOp = new ReplaceAliasesWithIDsInOp().replaceAliasesWithIds(op, transferContext); + // re-run pure checks on this op to see if there are no duplicates + try { + final var txnBody = txn.copyBuilder().cryptoTransfer(replacedOp).build(); + pureChecks(txnBody); + } catch (PreCheckException e) { + throw new HandleException(e.responseCode()); + } + return replacedOp; + } + + private void ensureExistenceOfAliasesOrCreate( + @NonNull final CryptoTransferTransactionBody op, @NonNull final TransferContextImpl transferContext) { + final var ensureAliasExistence = new EnsureAliasesStep(op); + ensureAliasExistence.doIn(transferContext); + } + + /** + * Decomposes a crypto transfer into a sequence of steps that can be executed in order. 
+ * Each step validates the preconditions needed from TransferContextImpl in order to perform its action.
+ * Steps are as follows:
+ * <ol>
+ *     <li>(c,o) Ensure existence of alias-referenced accounts</li>
+ *     <li>(+,c) Charge custom fees for token transfers</li>
+ *     <li>(o) Ensure associations of token recipients</li>
+ *     <li>(+) Do zero-sum hbar balance changes</li>
+ *     <li>(+) Do zero-sum fungible token transfers</li>
+ *     <li>(+) Change NFT owners</li>
+ *     <li>(+,c) Pay staking rewards, possibly to previously unmentioned stakee accounts</li>
+ * </ol>
+ * LEGEND: '+' = creates new BalanceChange(s) from either the transaction body, custom fee schedule, or staking reward situation + * 'c' = updates an existing BalanceChange + * 'o' = causes a side effect not represented as BalanceChange + * @param op The crypto transfer transaction body + * @return A list of steps to execute + */ + private List decomposeIntoSteps(final CryptoTransferTransactionBody op) { + final List steps = new ArrayList<>(); + // TODO: implement other steps + + return steps; } /** @@ -197,8 +312,8 @@ private void checkReceiver( // to check that the receiver signed the transaction, UNLESS the sender or receiver is // the treasury, in which case fallback fees will not be applied when the transaction is handled, // so the receiver key does not need to sign. - final var treasury = tokenMeta.treasuryNum(); - if (treasury != senderId.accountNumOrThrow() && treasury != receiverId.accountNumOrThrow()) { + final var treasuryId = tokenMeta.treasuryAccountId(); + if (!treasuryId.equals(senderId) && !treasuryId.equals(receiverId)) { meta.requireKeyOrThrow(receiverId, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java index 5b63a77f6838..d6ae3b64b40b 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java @@ -38,7 +38,7 @@ import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.validators.StakingValidator; -import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.validation.EntityType; import com.hedera.node.app.spi.validation.ExpiryMeta; import com.hedera.node.app.spi.workflows.HandleContext; @@ -61,17 +61,17 @@ public class CryptoUpdateHandler extends BaseCryptoHandler implements Transactio private final CryptoSignatureWaivers waivers; private StakingValidator stakingValidator; - private NodeInfo nodeInfo; + private NetworkInfo networkInfo; @Inject public CryptoUpdateHandler( @NonNull final CryptoSignatureWaivers waivers, @NonNull final StakingValidator stakingValidator, - @NonNull final NodeInfo nodeInfo) { + @NonNull final NetworkInfo networkInfo) { this.waivers = requireNonNull(waivers, "The supplied argument 'waivers' must not be null"); this.stakingValidator = requireNonNull(stakingValidator, "The supplied argument 'stakingValidator' must not be null"); - this.nodeInfo = requireNonNull(nodeInfo, "The supplied argument 'nodeInfo' must not be null"); + this.networkInfo = requireNonNull(networkInfo, "The supplied argument 'networkInfo' must not be null"); } @Override @@ -272,6 +272,6 @@ private void validateFields( op.stakedNodeId(), accountStore, context, - nodeInfo); + networkInfo); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java index 341b88a67520..bea0fec0bd60 100644 --- 
a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java @@ -206,7 +206,7 @@ private ValidationResult validateSemantics( final var accountRel = TokenHandlerHelper.getIfUsable(accountId, tokenId, tokenRelStore); validateFalse( - token.treasuryAccountNumber() == accountRel.accountNumber(), + token.treasuryAccountId().equals(accountRel.accountId()), ResponseCodeEnum.CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT); return new ValidationResult(account, token, accountRel); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java index df8b382c946e..a75adf24c286 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java @@ -19,7 +19,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.*; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; -import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator.verifyTokenInstanceAmounts; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static java.util.Objects.requireNonNull; @@ -137,7 +136,7 @@ public void handle(@NonNull final HandleContext context) throws HandleException tokenRelStore); // Update treasury's NFT count - final var treasuryAcct = accountStore.get(asAccount(token.treasuryAccountNumber())); + final var treasuryAcct = accountStore.get(token.treasuryAccountId()); final var updatedTreasuryAcct = treasuryAcct .copyBuilder() .numberOwnedNfts(treasuryAcct.numberOwnedNfts() - nftSerialNums.size()) @@ -163,7 +162,7 @@ private ValidationResult validateSemantics( final var token = TokenHandlerHelper.getIfUsable(tokenId, tokenStore); validateTrue(token.supplyKey() != null, TOKEN_HAS_NO_SUPPLY_KEY); - final var treasuryAcctId = asAccount(token.treasuryAccountNumber()); + final var treasuryAcctId = token.treasuryAccountId(); final var treasuryRel = TokenHandlerHelper.getIfUsable(treasuryAcctId, tokenId, tokenRelStore); return new ValidationResult(token, treasuryRel); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java index c89f4abffcd7..3f369abc4597 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java @@ -166,9 +166,7 @@ private void associateAccounts( final var entitiesConfig = context.configuration().getConfigData(EntitiesConfig.class); // This should exist as it is validated in validateSemantics - final var treasury = accountStore.get(AccountID.newBuilder() - 
.accountNum(newToken.treasuryAccountNumber()) - .build()); + final var treasury = accountStore.get(newToken.treasuryAccountId()); // Validate if token relation can be created between treasury and new token // If this succeeds, create and link token relation. tokenCreateValidator.validateAssociation(entitiesConfig, tokensConfig, treasury, newToken, tokenRelStore); @@ -194,12 +192,12 @@ private void associateAccounts( private Token buildToken( final long newTokenNum, final TokenCreateTransactionBody op, final ExpiryMeta resolvedExpiryMeta) { return new Token( - newTokenNum, + asToken(newTokenNum), op.name(), op.symbol(), op.decimals(), 0, // is this correct ? - op.treasury().accountNum(), + op.treasury(), op.adminKey(), op.kycKey(), op.freezeKey(), @@ -211,7 +209,9 @@ private Token buildToken( false, op.tokenType(), op.supplyType(), - resolvedExpiryMeta.autoRenewNum(), + AccountID.newBuilder() + .accountNum(resolvedExpiryMeta.autoRenewNum()) + .build(), resolvedExpiryMeta.autoRenewPeriod(), resolvedExpiryMeta.expiry(), op.memo(), diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java index 606620fb2b4e..34a46799f8fd 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java @@ -85,7 +85,7 @@ public void handle(@NonNull final HandleContext context) throws HandleException tokenStore.put(updatedToken); // Update the token treasury account's treasury titles count - final var account = accountStore.get(BaseCryptoHandler.asAccount(token.treasuryAccountNumber())); + final var account = accountStore.get(token.treasuryAccountId()); final var updatedAccount = account.copyBuilder() .numberTreasuryTitles(account.numberTreasuryTitles() - 1) .build(); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java index bc96c2ac6c6c..74234a48f26c 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java @@ -125,7 +125,10 @@ public void handle(@NonNull final HandleContext context) { } } else { // Handle active tokens - validateFalse(tokenRel.accountNumber() == token.treasuryAccountNumber(), ACCOUNT_IS_TREASURY); + validateFalse( + token.treasuryAccountId() != null + && token.treasuryAccountId().equals(tokenRel.accountId()), + ACCOUNT_IS_TREASURY); validateFalse(tokenRel.frozen(), ACCOUNT_FROZEN_FOR_TOKEN); if (tokenRelBalance > 0) { @@ -167,9 +170,7 @@ public void handle(@NonNull final HandleContext context) { // get changes to account and token relations final var updatedTokenRels = new TokenRelListCalculator(tokenRelStore).removeTokenRels(account, tokenRelsToRemove); - final var newHeadTokenId = updatedTokenRels.updatedHeadTokenId() != null - ? 
updatedTokenRels.updatedHeadTokenId() - : account.headTokenNumber(); + final var newHeadTokenId = updatedTokenRels.updatedHeadTokenId(); // Update the account with the aggregate number of NFTs, auto associations, associations, and positive balances // to remove, as well as the new head token number @@ -178,7 +179,7 @@ public void handle(@NonNull final HandleContext context) { .usedAutoAssociations(account.usedAutoAssociations() - numAutoAssociationsToSubtract) .numberAssociations(account.numberAssociations() - numAssociationsToSubtract) .numberPositiveBalances(account.numberPositiveBalances() - numPositiveBalancesToSubtract) - .headTokenNumber(newHeadTokenId) + .headTokenNumber(newHeadTokenId == null ? -1 : newHeadTokenId.tokenNum()) .build(); // Finally, update the account and the token relations via their respective stores @@ -223,10 +224,12 @@ private ValidatedResult validateSemantics( final TokenRelation dissociatedTokenTreasuryRel; if (possiblyUnusableToken != null) { validateFalse(possiblyUnusableToken.paused(), TOKEN_IS_PAUSED); - final var tokenTreasuryAcct = AccountID.newBuilder() - .accountNum(possiblyUnusableToken.treasuryAccountNumber()) - .build(); - dissociatedTokenTreasuryRel = tokenRelStore.get(tokenTreasuryAcct, tokenId); + if (possiblyUnusableToken.treasuryAccountId() != null) { + final var tokenTreasuryAcct = possiblyUnusableToken.treasuryAccountId(); + dissociatedTokenTreasuryRel = tokenRelStore.get(tokenTreasuryAcct, tokenId); + } else { + dissociatedTokenTreasuryRel = null; + } } else { // If the token isn't found, assume the treasury token rel is null dissociatedTokenTreasuryRel = null; diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java index dfe9e033736d..1ed43be378f9 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java @@ -33,7 +33,6 @@ import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; -import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Duration; import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.QueryHeader; @@ -149,7 +148,7 @@ private Optional infoForToken( info.symbol(token.symbol()); info.name(token.name()); info.memo(token.memo()); - info.treasury(AccountID.newBuilder().accountNum(token.treasuryAccountNumber())); + info.treasury(token.treasuryAccountId()); info.totalSupply(token.totalSupply()); info.maxSupply(token.maxSupply()); info.decimals(token.decimals()); @@ -165,8 +164,8 @@ private Optional infoForToken( info.feeScheduleKey(token.feeScheduleKey()); } - if (token.autoRenewAccountNumber() != 0) { - info.autoRenewAccount(AccountID.newBuilder().accountNum(token.autoRenewAccountNumber())); + if (token.autoRenewAccountId() != null) { + info.autoRenewAccount(token.autoRenewAccountId()); info.autoRenewPeriod(Duration.newBuilder().seconds(token.autoRenewSecs())); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java 
b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java index 2ccf5bc514e3..0da8e4e0dc87 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java @@ -18,7 +18,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.*; import static com.hedera.node.app.service.mono.state.merkle.internals.BitPackUtils.MAX_NUM_ALLOWED; -import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; @@ -100,8 +99,7 @@ public void handle(@NonNull final HandleContext context) throws HandleException final var token = tokenStore.get(tokenId); validateTrue(token != null, INVALID_TOKEN_ID); // validate treasury relation exists - final var treasuryRel = tokenRelStore.get( - AccountID.newBuilder().accountNum(token.treasuryAccountNumber()).build(), tokenId); + final var treasuryRel = tokenRelStore.get(token.treasuryAccountId(), tokenId); validateTrue(treasuryRel != null, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); if (token.tokenType() == TokenType.FUNGIBLE_COMMON) { @@ -173,11 +171,10 @@ private List mintNonFungible( validateFalse(metadata.isEmpty(), INVALID_TOKEN_MINT_METADATA); // validate token number from treasury relation - final var tokenId = asToken(treasuryRel.tokenNumber()); - validateTrue(treasuryRel.tokenNumber() == token.tokenNumber(), FAIL_INVALID); + final var tokenId = treasuryRel.tokenId(); // get the treasury account - final var treasuryAccount = accountStore.get(asAccount(treasuryRel.accountNumber())); + final var treasuryAccount = accountStore.get(treasuryRel.accountId()); validateTrue(treasuryAccount != null, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); // get the latest serial number minted for the token @@ -231,7 +228,7 @@ private Nft buildNewlyMintedNft( final long currentSerialNumber) { return Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(tokenId.tokenNum()) + .tokenId(tokenId) .serialNumber(currentSerialNumber) .build()) // ownerID is null to indicate owned by treasury diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java index 424ca9259d05..f007d3bf945c 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java @@ -130,7 +130,7 @@ public void handle(@NonNull final HandleContext context) throws HandleException // We allow existing treasuries to have any nft balances left over, but the new treasury should // not have any balances left over. 
Transfer all balances for the current token to new treasury if (op.hasTreasury()) { - final var existingTreasury = asAccount(token.treasuryAccountNumber()); + final var existingTreasury = token.treasuryAccountId(); final var newTreasury = op.treasuryOrThrow(); final var newTreasuryAccount = getIfUsable( newTreasury, accountStore, context.expiryValidator(), INVALID_TREASURY_ACCOUNT_FOR_TOKEN); @@ -179,7 +179,7 @@ private void transferTokensToNewTreasury( final Token token, final WritableTokenRelationStore tokenRelStore, final WritableAccountStore accountStore) { - final var tokenId = asToken(token.tokenNumber()); + final var tokenId = token.tokenId(); // Validate both accounts are not frozen and have the right keys final var oldTreasuryRel = getIfUsable(oldTreasury, tokenId, tokenRelStore); final var newTreasuryRel = getIfUsable(newTreasury, tokenId, tokenRelStore); @@ -213,8 +213,8 @@ private void transferFungibleTokensToTreasury( final WritableAccountStore accountStore) { final var adjustment = fromTreasuryRel.balance(); - final var fromTreasury = accountStore.getAccountById(asAccount(fromTreasuryRel.accountNumber())); - final var toTreasury = accountStore.getAccountById(asAccount(toTreasuryRel.accountNumber())); + final var fromTreasury = accountStore.getAccountById(fromTreasuryRel.accountId()); + final var toTreasury = accountStore.getAccountById(toTreasuryRel.accountId()); adjustBalance(fromTreasuryRel, fromTreasury, -adjustment, tokenRelStore, accountStore); adjustBalance(toTreasuryRel, toTreasury, adjustment, tokenRelStore, accountStore); @@ -272,8 +272,8 @@ private void changeOwnerToNewTreasury( final TokenRelation toTreasuryRel, final WritableTokenRelationStore tokenRelStore, final WritableAccountStore accountStore) { - final var fromTreasury = accountStore.getAccountById(asAccount(fromTreasuryRel.accountNumber())); - final var toTreasury = accountStore.getAccountById(asAccount(toTreasuryRel.accountNumber())); + final var fromTreasury = accountStore.getAccountById(fromTreasuryRel.accountId()); + final var toTreasury = accountStore.getAccountById(toTreasuryRel.accountId()); final var fromRelBalance = fromTreasuryRel.balance(); final var toRelBalance = toTreasuryRel.balance(); @@ -343,8 +343,8 @@ private void updateNameSymbolMemoAndTreasury( if (op.hasMemo() && op.memo().length() > 0) { builder.memo(op.memo()); } - if (op.hasTreasury() && op.treasuryOrThrow().accountNum() != originalToken.treasuryAccountNumber()) { - builder.treasuryAccountNumber(op.treasuryOrThrow().accountNum()); + if (op.hasTreasury() && !op.treasuryOrThrow().equals(originalToken.treasuryAccountId())) { + builder.treasuryAccountId(op.treasuryOrThrow()); } } @@ -363,7 +363,7 @@ private void updateExpiryFields( builder.autoRenewSecs(resolvedExpiry.autoRenewPeriod()); } if (op.hasAutoRenewAccount()) { - builder.autoRenewAccountNumber(resolvedExpiry.autoRenewNum()); + builder.autoRenewAccountId(AccountID.newBuilder().accountNum(resolvedExpiry.autoRenewNum())); } } @@ -432,7 +432,7 @@ private void updateTreasuryTitles( @NonNull final WritableAccountStore accountStore, @NonNull final WritableTokenRelationStore tokenRelStore) { final var newTokenRelation = - tokenRelStore.get(asAccount(newTreasuryAccount.accountNumber()), asToken(originalToken.tokenNumber())); + tokenRelStore.get(asAccount(newTreasuryAccount.accountNumber()), originalToken.tokenId()); final var newRelCopy = newTokenRelation.copyBuilder(); if (originalToken.hasFreezeKey()) { diff --git 
a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AliasUtils.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AliasUtils.java new file mode 100644 index 000000000000..f02c14b3f92b --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AliasUtils.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.handlers.transfer; + +import static com.hedera.node.app.service.mono.pbj.PbjConverter.asBytes; +import static com.hedera.node.app.spi.key.KeyUtils.isValid; + +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import com.hedera.pbj.runtime.io.stream.ReadableStreamingData; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Objects; + +public final class AliasUtils { + private AliasUtils() { + throw new UnsupportedOperationException("Utility Class"); + } + /** + * Attempts to parse a {@code Key} from given alias {@code ByteString}. If the Key is of type + * Ed25519 or ECDSA(secp256k1), returns true if it is a valid key; and false otherwise. + * + * @param alias given alias byte string + * @return whether it parses to a valid primitive key + */ + public static boolean isSerializedProtoKey(final Bytes alias) { + try (final var bais = new ByteArrayInputStream(Objects.requireNonNull(asBytes(alias)))) { + final var stream = new ReadableStreamingData(bais); + stream.limit(bais.available()); + final var key = Key.PROTOBUF.parse(stream); + return (key.hasEcdsaSecp256k1() || key.hasEd25519()) && isValid(key); + } catch (final IOException e) { + return false; + } + } + + /** + * Parse a {@code Key} from given alias {@code Bytes}. If there is a parse error, throws a + * {@code HandleException} with {@code INVALID_ALIAS_KEY} response code. 
+ * @param alias given alias bytes + * @return the parsed key + */ + public static Key asKeyFromAlias(Bytes alias) { + try (final var bais = new ByteArrayInputStream(Objects.requireNonNull(asBytes(alias)))) { + final var stream = new ReadableStreamingData(bais); + return Key.PROTOBUF.parse(stream); + } catch (final IOException e) { + throw new HandleException(ResponseCodeEnum.INVALID_ALIAS_KEY); + } + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AutoAccountCreator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AutoAccountCreator.java new file mode 100644 index 000000000000..aa9cd4fd1fe6 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AutoAccountCreator.java @@ -0,0 +1,232 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.handlers.transfer; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.FAIL_INVALID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.PAYER_ACCOUNT_NOT_FOUND; +import static com.hedera.node.app.service.mono.txns.crypto.AbstractAutoCreationLogic.AUTO_MEMO; +import static com.hedera.node.app.service.mono.txns.crypto.AbstractAutoCreationLogic.LAZY_MEMO; +import static com.hedera.node.app.service.mono.txns.crypto.AbstractAutoCreationLogic.THREE_MONTHS_IN_SECONDS; +import static com.hedera.node.app.service.token.impl.handlers.transfer.AliasUtils.asKeyFromAlias; +import static com.hedera.node.app.service.token.impl.handlers.transfer.TransferContextImpl.isOfEvmAddressSize; +import static com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator.IMMUTABILITY_SENTINEL_KEY; +import static com.hedera.node.app.spi.key.KeyUtils.ECDSA_SECP256K1_COMPRESSED_KEY_LENGTH; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.swirlds.common.utility.CommonUtils.hex; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.token.CryptoCreateTransactionBody; +import com.hedera.hapi.node.token.CryptoUpdateTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.evm.utils.EthSigsUtils; +import com.hedera.node.app.service.mono.utils.EntityIdUtils; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.config.data.AccountsConfig; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import 
edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.UnaryOperator; +import javax.inject.Inject; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class AutoAccountCreator { + private static final Logger log = LogManager.getLogger(AutoAccountCreator.class); + private WritableAccountStore accountStore; + private HandleContext handleContext; + // checks tokenAliasMap if the change consists an alias that is already used in previous + // iteration of the token transfer list. This map is used to count number of + // maxAutoAssociations needed on auto created account + protected final Map> tokenAliasMap = new HashMap<>(); + private static final CryptoUpdateTransactionBody.Builder UPDATE_TXN_BODY_BUILDER = + CryptoUpdateTransactionBody.newBuilder() + .key(Key.newBuilder().ecdsaSecp256k1(Bytes.EMPTY).build()); + + @Inject + public AutoAccountCreator(@NonNull final HandleContext handleContext) { + this.handleContext = handleContext; + this.accountStore = handleContext.writableStore(WritableAccountStore.class); + } + + /** + * Creates an account for the given alias. + * @param alias the alias to create the account for + * @param isByTokenTransfer whether the account is being created by a token transfer + */ + public AccountID create(@NonNull final Bytes alias, final boolean isByTokenTransfer) { + final var accountsConfig = handleContext.configuration().getConfigData(AccountsConfig.class); + + validateTrue( + accountStore.sizeOfAccountState() + 1 <= accountsConfig.maxNumber(), + ResponseCodeEnum.MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED); + + final TransactionBody.Builder syntheticCreation; + String memo; + + if (isByTokenTransfer) { + tokenAliasMap.putIfAbsent(alias, Collections.emptySet()); + } + + final var maxAutoAssociations = + tokenAliasMap.getOrDefault(alias, Collections.emptySet()).size(); + final var isAliasEVMAddress = EntityIdUtils.isOfEvmAddressSize(alias); + if (isAliasEVMAddress) { + syntheticCreation = createHollowAccount(alias, 0L); + memo = LAZY_MEMO; + } else { + final var key = asKeyFromAlias(alias); + syntheticCreation = createAccount(alias, key, 0L, maxAutoAssociations); + memo = AUTO_MEMO; + } + + var fee = autoCreationFeeFor(syntheticCreation); + if (isAliasEVMAddress) { + fee += getLazyCreationFinalizationFee(); + } + // TODO: Check if payer has enough balance to pay for the fee + + final var childRecord = handleContext.dispatchRemovableChildTransaction( + syntheticCreation.memo(memo).build(), CryptoCreateRecordBuilder.class); + + if (!isAliasEVMAddress) { + final var key = asKeyFromAlias(alias); + if (key.hasEcdsaSecp256k1()) { + final var evmAddress = tryAddressRecovery(key, EthSigsUtils::recoverAddressFromPubKey); + if (evmAddress != null) { + childRecord.evmAddress(Bytes.wrap(evmAddress)); + } + } + } + // TODO: Not sure if fee should be set here childRecord.transactionFee(fee); + + final var createdAccountId = accountStore.getAccountIDByAlias(alias); + validateTrue(createdAccountId != null, FAIL_INVALID); + return createdAccountId; + } + + /** + * Get fees for finalization of lazy creation. + * @return fee for finalization of lazy creation + */ + private long getLazyCreationFinalizationFee() { + return autoCreationFeeFor(TransactionBody.newBuilder().cryptoUpdateAccount(UPDATE_TXN_BODY_BUILDER)); + } + + /** + * Get fees for auto creation. 
+ * @param syntheticCreation transaction body for auto creation + * @return fee for auto creation + */ + private long autoCreationFeeFor(final TransactionBody.Builder syntheticCreation) { + final var topLevelPayer = handleContext.body().transactionIDOrThrow().accountIDOrThrow(); + final var payerAccount = accountStore.get(topLevelPayer); + validateTrue(payerAccount != null, PAYER_ACCOUNT_NOT_FOUND); + final var txn = Transaction.newBuilder().body(syntheticCreation.build()).build(); + // final var fees = handleContext.feeCalculator().computePayment(txn, payerAccount.key()); + // return fees.serviceFee() + fees.networkFee() + fees.nodeFee(); + // TODO : need to use fee calculator + return 100; + } + + /** + * Create a transaction body for new hollow-account with the given alias. + * @param alias alias of the account + * @param balance initial balance of the account + * @return transaction body for new hollow-account + */ + public TransactionBody.Builder createHollowAccount(final Bytes alias, final long balance) { + final var baseBuilder = createAccountBase(balance); + baseBuilder.key(IMMUTABILITY_SENTINEL_KEY).alias(alias).memo(LAZY_MEMO); + return TransactionBody.newBuilder().cryptoCreateAccount(baseBuilder.build()); + } + + /** + * Create a transaction body for new account with the given balance and other common fields. + * @param balance initial balance of the account + * @return transaction body for new account + */ + private CryptoCreateTransactionBody.Builder createAccountBase(final long balance) { + return CryptoCreateTransactionBody.newBuilder() + .initialBalance(balance) + .autoRenewPeriod(Duration.newBuilder().seconds(THREE_MONTHS_IN_SECONDS)); + } + + /** + * Create a transaction body for new account with the given alias, key, balance and maxAutoAssociations. + * @param alias alias of the account + * @param key key of the account + * @param balance initial balance of the account + * @param maxAutoAssociations maxAutoAssociations of the account + * @return transaction body for new account + */ + private TransactionBody.Builder createAccount( + final Bytes alias, final Key key, final long balance, final int maxAutoAssociations) { + final var baseBuilder = createAccountBase(balance); + baseBuilder.key(key).alias(alias).memo(AUTO_MEMO); + + if (maxAutoAssociations > 0) { + baseBuilder.maxAutomaticTokenAssociations(maxAutoAssociations); + } + return TransactionBody.newBuilder().cryptoCreateAccount(baseBuilder.build()); + } + + /** + * Try to recover EVM address from the given key. + * @param key key to recover EVM address from + * @param addressRecovery function to recover EVM address from the given key + * @return recovered EVM address if successful, otherwise null + */ + @Nullable + private byte[] tryAddressRecovery(@Nullable final Key key, final UnaryOperator addressRecovery) { + if (key != null && key.hasEcdsaSecp256k1()) { + // Only compressed keys are stored at the moment + final var keyBytes = key.ecdsaSecp256k1OrThrow(); + if (keyBytes.length() == ECDSA_SECP256K1_COMPRESSED_KEY_LENGTH) { + final var keyBytesArray = keyBytes.toByteArray(); + final var evmAddress = addressRecovery.apply(keyBytesArray); + if (isEvmAddress(Bytes.wrap(evmAddress))) { + return evmAddress; + } else { + // Not ever expected, since above checks should imply a valid input to the + // LibSecp256k1 library + log.warn("Unable to recover EVM address from {}", () -> hex(keyBytesArray)); + } + } + } + return null; + } + + /** + * Check if the given address is of a valid EVM address length. 
+ * @param address address to check + * @return true if the given address is a valid EVM address length, false otherwise + */ + private boolean isEvmAddress(final Bytes address) { + return address != null && isOfEvmAddressSize(address); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/EnsureAliasesStep.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/EnsureAliasesStep.java new file mode 100644 index 000000000000..a85fb823fee7 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/EnsureAliasesStep.java @@ -0,0 +1,176 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.handlers.transfer; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ALIAS_KEY; +import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static java.util.Collections.emptyList; + +import com.hedera.hapi.node.base.AccountAmount; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.NftTransfer; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * This is the first step in CryptoTransfer logic. This ensures that all aliases are resolved to their canonical forms. + * The resolved forms are stored in TransferContext and then used in the rest of the transfer logic. + */ +public class EnsureAliasesStep implements TransferStep { + final CryptoTransferTransactionBody op; + + // Temporary token transfer resolutions map containing the token transfers to alias, is needed to check if + // an alias is repeated. 
It is allowed to be repeated in multiple token transfer lists, but not in a single + // token transfer list + private final Map tokenTransferResolutions = new HashMap<>(); + + public EnsureAliasesStep(final CryptoTransferTransactionBody op) { + this.op = op; + } + + @Override + public void doIn(final TransferContext transferContext) { + final var hbarTransfers = op.transfersOrElse(TransferList.DEFAULT).accountAmountsOrElse(emptyList()); + final var tokenTransfers = op.tokenTransfersOrElse(emptyList()); + // resolve hbar adjusts and add all alias resolutions to resolutions map in TransferContext + resolveHbarAdjusts(hbarTransfers, transferContext); + // resolve hbar adjusts and add all alias resolutions to resolutions map + // and token resolutions map in TransferContext + resolveTokenAdjusts(tokenTransfers, transferContext); + } + + /** + * Resolve token adjusts and add all alias resolutions to resolutions map in TransferContext. + * If an accountID is an alias and is repeated within the same token transfer list, INVALID_ALIAS_KEY + * is returned. If it is present in multiple transfer lists and is in resolutions map, it will be returned. + * @param tokenTransfers the token transfers to resolve + * @param transferContext the transfer context + */ + private void resolveTokenAdjusts( + final List tokenTransfers, final TransferContext transferContext) { + for (final var tt : tokenTransfers) { + tokenTransferResolutions.clear(); + for (final var adjust : tt.transfersOrElse(emptyList())) { + if (isAlias(adjust.accountIDOrThrow())) { + final var account = resolveForFungibleToken(adjust, transferContext); + final var alias = adjust.accountIDOrThrow().alias(); + tokenTransferResolutions.put(alias, account); + validateTrue(account != null, INVALID_ACCOUNT_ID); + } + } + + for (final var nftAdjust : tt.nftTransfersOrElse(emptyList())) { + resolveForNft(nftAdjust, transferContext); + } + } + } + + private AccountID resolveForFungibleToken(final AccountAmount adjust, final TransferContext transferContext) { + final var accountId = adjust.accountIDOrThrow(); + validateFalse(tokenTransferResolutions.containsKey(accountId.alias()), INVALID_ALIAS_KEY); + final var account = transferContext.getFromAlias(accountId); + if (account == null) { + final var alias = accountId.alias(); + // If the token resolutions map already contains this unknown alias, we can assume + // it was successfully auto-created by a prior mention in this CryptoTransfer. + // (If it appeared in a sender location, this transfer will fail anyway.) + final var isInResolutions = transferContext.resolutions().containsKey(alias); + if (adjust.amount() > 0 && !isInResolutions) { + transferContext.createFromAlias(alias, true); + } else { + validateTrue(transferContext.resolutions().containsKey(alias), INVALID_ACCOUNT_ID); + } + return transferContext.resolutions().get(alias); + } else { + return account; + } + } + + /** + * Resolve hbar adjusts and add all alias resolutions to resolutions map in TransferContext. + * If the accountID is an alias and is already in the resolutions map, it will be returned. + * If the accountID is an alias and is not in the resolutions map, it will be autoCreated and + * will be added to resolutions map. 
+ * @param hbarTransfers the hbar transfers to resolve + * @param transferContext the transfer context + */ + private void resolveHbarAdjusts(final List hbarTransfers, final TransferContext transferContext) { + for (final var aa : hbarTransfers) { + final var accountId = aa.accountIDOrThrow(); + if (isAlias(accountId)) { + // If an alias is repeated for hbar transfers, it will fail + final var isInResolutions = transferContext.resolutions().containsKey(accountId.alias()); + validateTrue(!isInResolutions, ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS); + + final var account = transferContext.getFromAlias(accountId); + if (aa.amount() > 0) { + if (account == null) { + transferContext.createFromAlias(accountId.alias(), false); + } else { + validateTrue(account != null, INVALID_ACCOUNT_ID); + } + } else { + validateTrue(account != null, INVALID_ACCOUNT_ID); + } + } + } + } + + /** + * Resolve NFT adjusts and add all alias resolutions to resolutions map in TransferContext. + * @param nftAdjust the NFT transfer to resolve + * @param transferContext the transfer context + */ + private void resolveForNft(final NftTransfer nftAdjust, TransferContext transferContext) { + final var receiverId = nftAdjust.receiverAccountIDOrThrow(); + final var senderId = nftAdjust.senderAccountIDOrThrow(); + // sender can't be a missing accountId. It will fail if the alias doesn't exist + if (isAlias(senderId)) { + final var sender = transferContext.getFromAlias(senderId); + validateTrue(sender != null, INVALID_ACCOUNT_ID); + } + // Note a repeated alias is still valid for the NFT receiver case + if (isAlias(receiverId)) { + final var receiver = transferContext.getFromAlias(receiverId); + if (receiver == null) { + final var isInResolutions = transferContext.resolutions().containsKey(receiverId.alias()); + if (!isInResolutions) { + transferContext.createFromAlias(receiverId.alias(), false); + } + } else { + validateTrue(receiver != null, INVALID_ACCOUNT_ID); + } + } + } + + /** + * Check if the given accountID is an alias + * @param accountID the accountID to check + * @return true if the accountID is an alias, false otherwise + */ + public static boolean isAlias(AccountID accountID) { + return accountID.hasAlias() && (!accountID.hasAccountNum() || accountID.accountNum() == 0L); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/ReplaceAliasesWithIDsInOp.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/ReplaceAliasesWithIDsInOp.java new file mode 100644 index 000000000000..324f78b3869e --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/ReplaceAliasesWithIDsInOp.java @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.handlers.transfer; + +import static com.hedera.node.app.service.token.impl.handlers.transfer.EnsureAliasesStep.isAlias; +import static java.util.Collections.emptyList; + +import com.hedera.hapi.node.base.AccountAmount; +import com.hedera.hapi.node.base.NftTransfer; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import java.util.ArrayList; +import java.util.List; + +/** + * Replaces aliases with IDs in the crypto transfer operation. This is needed so that the rest of the steps in the + * transfer handler can process the operation easily. + */ +public class ReplaceAliasesWithIDsInOp { + /** + * Replaces aliases with IDs in the crypto transfer operation. It looks at the resolutions that happened in {@link EnsureAliasesStep}, + * which are stored in {@link TransferContextImpl}, and replaces aliases with IDs. + * @param op the crypto transfer operation + * @param transferContext the transfer context + * @return the crypto transfer operation with aliases replaced with IDs + */ + public CryptoTransferTransactionBody replaceAliasesWithIds( + final CryptoTransferTransactionBody op, final TransferContextImpl transferContext) { + final var resolutions = transferContext.resolutions(); + final var replacedAliasesOp = CryptoTransferTransactionBody.newBuilder(); + final var transferList = TransferList.newBuilder(); + final var tokenTransfersList = new ArrayList<TokenTransferList>(); + final var accountAmounts = new ArrayList<AccountAmount>(); + // replace all aliases in hbar transfers + for (final var aa : op.transfers().accountAmountsOrElse(emptyList())) { + if (isAlias(aa.accountIDOrThrow())) { + final var resolvedId = resolutions.get(aa.accountID().alias()); + accountAmounts.add(aa.copyBuilder().accountID(resolvedId).build()); + } else { + accountAmounts.add(aa); + } + } + transferList.accountAmounts(accountAmounts); + replacedAliasesOp.transfers(transferList); + + // replace all aliases in token transfers + for (final var adjust : op.tokenTransfersOrElse(emptyList())) { + final var tokenTransferList = TokenTransferList.newBuilder().token(adjust.token()); + final List<AccountAmount> replacedTokenAdjusts = new ArrayList<>(); + for (final var tokenAdjust : adjust.transfersOrElse(emptyList())) { + if (isAlias(tokenAdjust.accountIDOrThrow())) { + final var resolvedId = + resolutions.get(tokenAdjust.accountID().alias()); + replacedTokenAdjusts.add( + tokenAdjust.copyBuilder().accountID(resolvedId).build()); + } else { + replacedTokenAdjusts.add(tokenAdjust); + } + } + if (!replacedTokenAdjusts.isEmpty()) { + tokenTransferList.transfers(replacedTokenAdjusts); + } + // replace aliases in nft adjusts + final List<NftTransfer> replacedNftAdjusts = new ArrayList<>(); + for (final var nftAdjust : adjust.nftTransfersOrElse(emptyList())) { + final var nftAdjustCopy = nftAdjust.copyBuilder(); + final var isReceiverAlias = isAlias(nftAdjust.receiverAccountIDOrThrow()); + final var isSenderAlias = isAlias(nftAdjust.senderAccountIDOrThrow()); + if (isReceiverAlias || isSenderAlias) { + if (isReceiverAlias) { + final var resolvedId = resolutions.get( + nftAdjust.receiverAccountIDOrThrow().alias()); + nftAdjustCopy.receiverAccountID(resolvedId); + } + if (isSenderAlias) { + final var resolvedId = resolutions.get( + nftAdjust.senderAccountIDOrThrow().alias()); + nftAdjustCopy.senderAccountID(resolvedId); + } + replacedNftAdjusts.add(nftAdjustCopy.build()); + } else { + replacedNftAdjusts.add(nftAdjust); + } + } + // if there are any
transfers or nft adjusts, add them to the token transfer list + if (!replacedNftAdjusts.isEmpty()) { + tokenTransferList.nftTransfers(replacedNftAdjusts); + } + tokenTransfersList.add(tokenTransferList.build()); + } + replacedAliasesOp.transfers(transferList); + replacedAliasesOp.tokenTransfers(tokenTransfersList); + return replacedAliasesOp.build(); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferContext.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferContext.java new file mode 100644 index 000000000000..1cf160ccd0a1 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferContext.java @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.handlers.transfer; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.util.Map; + +/** + * Context for the current CryptoTransfer transaction. + * Each CryptoTransfer transaction goes through different steps in handling. The output of one step will + * be needed as input to other steps. For example, in the first step we resolve all the aliases in the transaction body. + * The resolutions are needed in further steps to is IDs instead of aliases. + * It also has helper function to create accounts from alias. + * This class stores all the needed information that is shared between steps in handling a CryptoTransfer transaction. + * The lifecycle of this clas is the same as the lifecycle of a CryptoTransfer transaction. + */ +public interface TransferContext { + /** + * Looks up alias from accountID in form of alias and return the account ID with account number if found. + * Return null otherwise. + * @param aliasedId the account ID with the account number associated with alias + * @return the account ID with account number if found, null otherwise + */ + AccountID getFromAlias(AccountID aliasedId); + + /** + * Creates an account from the given alias. 
This is called when the account associated with alias + * is not found in the account store + * @param alias the alias of the account + * @param isFromTokenTransfer true if the account is created from token transfer, false otherwise + */ + void createFromAlias(Bytes alias, boolean isFromTokenTransfer); + + /** + * Returns the number of auto-creation of accounts in current transfer + * @return the number of auto-creation of accounts + */ + int numOfAutoCreations(); + + /** + * Returns the number of lazy-creation of accounts in current transfer + * @return the number of lazy-creation of accounts + */ + int numOfLazyCreations(); + + /** + * Returns the resolved accounts with alias and its account ID + * @return the resolved accounts with alias and its account ID + */ + Map resolutions(); + + // Throw if the fee cannot be charged for whatever reason + void chargeExtraFeeToHapiPayer(long amount); + + HandleContext getHandleContext(); +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferContextImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferContextImpl.java new file mode 100644 index 000000000000..5e42b50ee611 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferContextImpl.java @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.handlers.transfer; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.service.mono.utils.EntityIdUtils.EVM_ADDRESS_SIZE; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.transfer.AliasUtils.isSerializedProtoKey; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.config.data.AutoCreationConfig; +import com.hedera.node.config.data.LazyCreationConfig; +import com.hedera.node.config.data.TokensConfig; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.util.HashMap; +import java.util.Map; + +/** + * The context of a token transfer. This This is used to pass information between the steps of the transfer. 
+ */ +public class TransferContextImpl implements TransferContext { + private final WritableAccountStore accountStore; + private final AutoAccountCreator autoAccountCreator; + private final HandleContext context; + private int numAutoCreations; + private int numLazyCreations; + private final Map resolutions = new HashMap<>(); + private final AutoCreationConfig autoCreationConfig; + private final LazyCreationConfig lazyCreationConfig; + private final TokensConfig tokensConfig; + + public TransferContextImpl(final HandleContext context) { + this.context = context; + this.accountStore = context.writableStore(WritableAccountStore.class); + this.autoAccountCreator = new AutoAccountCreator(context); + this.autoCreationConfig = context.configuration().getConfigData(AutoCreationConfig.class); + this.lazyCreationConfig = context.configuration().getConfigData(LazyCreationConfig.class); + this.tokensConfig = context.configuration().getConfigData(TokensConfig.class); + } + + @Override + public AccountID getFromAlias(final AccountID aliasedId) { + final var account = accountStore.get(aliasedId); + + if (account != null) { + final var id = asAccount(account.accountNumber()); + resolutions.put(aliasedId.alias(), id); + return id; + } + return null; + } + + @Override + public void createFromAlias(final Bytes alias, final boolean isFromTokenTransfer) { + // if it is a serialized proto key, auto-create account + if (isSerializedProtoKey(alias)) { + validateTrue(autoCreationConfig.enabled(), NOT_SUPPORTED); + numAutoCreations++; + } else if (isOfEvmAddressSize(alias)) { + // if it is an evm address create a hollow account + validateTrue(lazyCreationConfig.enabled(), NOT_SUPPORTED); + numLazyCreations++; + } + // if this auto creation is from a token transfer, check if auto creation from tokens is enabled + if (isFromTokenTransfer) { + validateTrue(tokensConfig.autoCreationsIsEnabled(), NOT_SUPPORTED); + } + // Keep the created account in the resolutions map + final var createdAccount = autoAccountCreator.create(alias, isFromTokenTransfer); + resolutions.put(alias, createdAccount); + } + + @Override + public HandleContext getHandleContext() { + return context; + } + + @Override + public int numOfAutoCreations() { + return numAutoCreations; + } + + @Override + public void chargeExtraFeeToHapiPayer(final long amount) { + throw new UnsupportedOperationException("Not yet implemented"); + } + + public Map resolutions() { + return resolutions; + } + + @Override + public int numOfLazyCreations() { + return numLazyCreations; + } + + public static boolean isOfEvmAddressSize(final Bytes alias) { + return alias.length() == EVM_ADDRESS_SIZE; + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferStep.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferStep.java new file mode 100644 index 000000000000..60745c43ceef --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/TransferStep.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.handlers.transfer; + +import com.hedera.hapi.node.base.Key; +import java.util.Set; + +/** + * Defines the interface for each step in the crypto transfer process. + */ +public interface TransferStep { + /** + * Returns the set of keys that are authorized to perform this step. + * @param transferContext the context of the transfer + * @return the set of keys that are authorized to perform this step + */ + // FUTURE: all the logic in prehandle can be moved into appropriate steps + default Set authorizingKeysIn(TransferContext transferContext) { + return Set.of(); + } + + /** + * Perform the step and commit changes to the modifications in state. + * @param transferContext the context of the transfer + * @throws com.hedera.node.app.spi.workflows.HandleException if the step fails + */ + void doIn(TransferContext transferContext); +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/CryptoCreateRecordBuilder.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/CryptoCreateRecordBuilder.java index f68da8678fa8..1a4be5c7be45 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/CryptoCreateRecordBuilder.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/CryptoCreateRecordBuilder.java @@ -17,6 +17,7 @@ package com.hedera.node.app.service.token.impl.records; import com.hedera.hapi.node.base.AccountID; +import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; /** @@ -34,4 +35,12 @@ public interface CryptoCreateRecordBuilder { */ @NonNull CryptoCreateRecordBuilder accountID(@NonNull AccountID accountID); + + /** + * The new EVM address of the account created by this transaction. 
+ * @param evmAddress the new EVM address + * @return this builder + */ + @NonNull + CryptoCreateRecordBuilder evmAddress(@NonNull final Bytes evmAddress); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java index 858f35c130a3..1c788a986eb3 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token.impl.util; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; @@ -23,7 +24,6 @@ import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.node.app.service.token.ReadableTokenRelationStore; -import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; @@ -64,10 +64,10 @@ public TokenRelListCalculator(@NonNull final ReadableTokenRelationStore tokenRel * Assume that valid account A has head token number 1, and that the following list of token * relations ({@code TR}'s) exists for account A: *
      * <ol>
-     *   <li>TR(Account A, Token 1, prevToken = -1, nextToken = 2)</li>
+     *   <li>TR(Account A, Token 1, prevToken = null, nextToken = 2)</li>
      *   <li>TR(Account A, Token 2, prevToken = 1, nextToken = 3)</li>
      *   <li>TR(Account A, Token 3, prevToken = 2, nextToken = 4)</li>
-     *   <li>TR(Account A, Token 4, prevToken = 3, nextToken = -1)</li>
+     *   <li>TR(Account A, Token 4, prevToken = 3, nextToken = null)</li>
      * </ol>
      *
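The list above, and the Case 1-3 walk-through that follows, describe the same operation this diff migrates from -1 sentinels to nullable token IDs: unlinking one node of a doubly linked token-relation chain and, when necessary, moving the account's head pointer. Below is a minimal standalone sketch of that rewiring; the Rel record and plain Long ids are hypothetical simplifications for illustration only, not the service's real TokenRelation/TokenID API.

import java.util.HashMap;
import java.util.Map;

// Simplified, hypothetical model of an account's token-relation chain: each node keys on a
// Long "token id" and holds nullable prev/next ids (the diff's replacement for -1 sentinels).
final class TokenRelChainSketch {
    record Rel(Long tokenId, Long prevToken, Long nextToken) {}

    // Removes `target` from the chain, rewiring its neighbours and returning the (possibly new) head id.
    static Long remove(Map<Long, Rel> rels, Long head, Long target) {
        final Rel current = rels.remove(target);
        if (current == null) return head;
        final Rel prev = current.prevToken() == null ? null : rels.get(current.prevToken());
        final Rel next = current.nextToken() == null ? null : rels.get(current.nextToken());
        if (prev != null) rels.put(prev.tokenId(), new Rel(prev.tokenId(), prev.prevToken(), current.nextToken()));
        if (next != null) rels.put(next.tokenId(), new Rel(next.tokenId(), current.prevToken(), next.nextToken()));
        // If the removed node was the head, the next node (or null, if there is none) becomes the new head.
        return target.equals(head) ? current.nextToken() : head;
    }

    public static void main(String[] args) {
        final Map<Long, Rel> rels = new HashMap<>();
        rels.put(1L, new Rel(1L, null, 2L));
        rels.put(2L, new Rel(2L, 1L, 3L));
        rels.put(3L, new Rel(3L, 2L, 4L));
        rels.put(4L, new Rel(4L, 3L, null));
        Long head = remove(rels, 1L, 2L); // Case 1 below: removing Token 2 links Token 1 <-> Token 3
        System.out.println(head + " " + rels); // head stays 1; Token 1.next = 3, Token 3.prev = 1
    }
}

Removing the head node instead (Token 1) would return current.nextToken() as the new head, which mirrors the Case 2 behaviour described below; removing the tail (Token 4) leaves the head untouched and nulls the new tail's nextToken, as in Case 3.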

Case 1: removing a token relation in the middle of the list @@ -78,9 +78,9 @@ public TokenRelListCalculator(@NonNull final ReadableTokenRelationStore tokenRel * {@code Token 1}. After this method performs its operation, the resulting list of token * relations will be: *

      * <ol>
-     *   <li>{@code TR(Account A, Token 1, prevToken = -1, nextToken = 3)}</li>
+     *   <li>{@code TR(Account A, Token 1, prevToken = null, nextToken = 3)}</li>
      *   <li>{@code TR(Account A, Token 3, prevToken = 1, nextToken = 4)}</li>
-     *   <li>{@code TR(Account A, Token 4, prevToken = 3, nextToken = -1)}</li>
+     *   <li>{@code TR(Account A, Token 4, prevToken = 3, nextToken = null)}</li>
      * </ol>
* * TR(Account A, Token 2) is now removed from the list, and the prev/next pointers of the remaining @@ -92,14 +92,14 @@ public TokenRelListCalculator(@NonNull final ReadableTokenRelationStore tokenRel * token rel list, then the resulting list of token relations will be unchanged except for * {@code TR(Account A, Token 1)} which has been removed from the list, and * {@code TR(Account A, Token 2)}, which will now be the head of the list (i.e. it will have a - * {@code prevToken} value of -1) + * {@code prevToken} value of null) * *

Case 3: removing the last/tail-end token relation * Finally, if we remove the token relation {@code TR(Account A, Token 4)} at the end of the * account's token rel list, then the resulting list of token relations will also be unchanged * except for {@code TR(Account A, Token 5)}, which has been removed from the list, and * {@code TR(Account A, Token 4}), which is now the end of the list (i.e. it has a - * {@code nextToken} value of -1) + * {@code nextToken} value of null) * * @param account the account to remove the token relations from * @param tokenRelsToDelete the token relations to remove @@ -111,29 +111,31 @@ public TokenRelsRemovalResult removeTokenRels( @NonNull final Account account, @NonNull final List tokenRelsToDelete) { // Precondition: verify all token relation objects have the same account number as the given account object if (tokenRelsToDelete.stream() - .anyMatch(tokenRel -> tokenRel != null && tokenRel.accountNumber() != account.accountNumber())) { + .anyMatch(tokenRel -> + tokenRel != null && !tokenRel.accountId().equals(asAccount(account.accountNumber())))) { throw new IllegalArgumentException("All token relations must be for the same account"); } // Data Preprocessing: remove nulls and duplicate token rels final var cleanedTokenRelsToDelete = filterNullsAndDuplicates(tokenRelsToDelete); - final var currentHeadTokenNum = account.headTokenNumber(); - final var accountId = BaseCryptoHandler.asAccount(account.accountNumber()); + final var currentHeadTokenId = + TokenID.newBuilder().tokenNum(account.headTokenNumber()).build(); + final var accountId = asAccount(account.accountNumber()); // We'll create this mapping of (tokenId -> tokenRel) to make it easier to check if a token rel is in the list // of token rels to delete. It's only for ease of lookup and doesn't affect the algorithm final var tokenRelsToDeleteByTokenId = cleanedTokenRelsToDelete.stream() - .collect(Collectors.toMap(TokenRelation::tokenNumber, tokenRel -> tokenRel)); + .collect(Collectors.toMap(TokenRelation::tokenId, tokenRel -> tokenRel)); // Recreate all the token relations updated prev and next pointers. This includes the token relations that will // be deleted, but these will be filtered out later - final var updatedTokenRels = new HashMap(); + final var updatedTokenRels = new HashMap(); for (final TokenRelation currentTokenRelToDelete : cleanedTokenRelsToDelete) { // Grab the current, previous, and next token relations with any updates that were made to them in // previous iterations final var currentTokenRel = requireNonNull(getInPriorityOrder( - updatedTokenRels, tokenRelsToDeleteByTokenId, accountId, currentTokenRelToDelete.tokenNumber())); + updatedTokenRels, tokenRelsToDeleteByTokenId, accountId, currentTokenRelToDelete.tokenId())); final var currentPrevTokenRel = getInPriorityOrder( updatedTokenRels, tokenRelsToDeleteByTokenId, accountId, currentTokenRel.previousToken()); final var currentNextTokenRel = getInPriorityOrder( @@ -150,26 +152,24 @@ public TokenRelsRemovalResult removeTokenRels( // Note: even though we might delete the token relation represented by `updatedPrevTokenRel` later in this // loop, we still need to have an updated token rel object since the algorithm removes one token rel at a // time. 
Otherwise, the resulting pointers would be not always be correct - if (updatedPrevTokenRel != null) - updatedTokenRels.put(updatedPrevTokenRel.tokenNumber(), updatedPrevTokenRel); + if (updatedPrevTokenRel != null) updatedTokenRels.put(updatedPrevTokenRel.tokenId(), updatedPrevTokenRel); final var updatedNextTokenRel = updatedTokenRelsSurroundingCurrentTokenRel.updatedNextTokenRel(); // Likewise with `updatedNextTokenRel`, we need to update this token rel for now, even if we might delete it // later in this loop - if (updatedNextTokenRel != null) - updatedTokenRels.put(updatedNextTokenRel.tokenNumber(), updatedNextTokenRel); + if (updatedNextTokenRel != null) updatedTokenRels.put(updatedNextTokenRel.tokenId(), updatedNextTokenRel); } // Now, filter out all the token rels that are in the list of token rels to delete from `updatedTokenRels`: final var updatedTokenRelsToKeep = updatedTokenRels.values().stream() - .filter(tokenRel -> !tokenRelsToDeleteByTokenId.containsKey(tokenRel.tokenNumber())) + .filter(tokenRel -> !tokenRelsToDeleteByTokenId.containsKey(tokenRel.tokenId())) .toList(); // Calculate the account's new head token number, given the token relations to delete - final var updatedHeadTokenNum = calculateHeadTokenAfterDeletions( - currentHeadTokenNum, account, updatedTokenRels, tokenRelsToDeleteByTokenId); + final var updatedHeadTokenId = calculateHeadTokenAfterDeletions( + currentHeadTokenId, account, updatedTokenRels, tokenRelsToDeleteByTokenId); - return new TokenRelsRemovalResult(updatedHeadTokenNum, updatedTokenRelsToKeep); + return new TokenRelsRemovalResult(updatedHeadTokenId, updatedTokenRelsToKeep); } @NonNull @@ -197,27 +197,27 @@ private List filterNullsAndDuplicates(final List t * for convenience * @param accountId the account ID of the account that the token relations belong to (all token * relation account IDs must match this value) - * @param tokenNumToLookup the token ID of the token relation to retrieve + * @param tokenIdToLookup the token ID of the token relation to retrieve */ @Nullable private TokenRelation getInPriorityOrder( - @NonNull final Map updatedTokenRels, - @NonNull final Map tokenRelsToDeleteByTokenId, + @NonNull final Map updatedTokenRels, + @NonNull final Map tokenRelsToDeleteByTokenId, @NonNull final AccountID accountId, - final long tokenNumToLookup) { + final TokenID tokenIdToLookup) { + if (tokenIdToLookup == null) return null; // when prev/next is null // First we check for the token rel (accountId, token ID) in the updated token relations. 
This way we get the // most recent prev/next pointer changes even though these changes haven't been committed to any store - final var updatedTokenRelsValue = updatedTokenRels.get(tokenNumToLookup); + final var updatedTokenRelsValue = updatedTokenRels.get(tokenIdToLookup); if (updatedTokenRelsValue != null) return updatedTokenRelsValue; // Next we check for the token rel in our already-loaded collection of token relations to delete - final var tokensToDeleteRelsValue = tokenRelsToDeleteByTokenId.get(tokenNumToLookup); + final var tokensToDeleteRelsValue = tokenRelsToDeleteByTokenId.get(tokenIdToLookup); if (tokensToDeleteRelsValue != null) return tokensToDeleteRelsValue; // Finally, if we haven't found the token rel already, we resort to the token relation store to retrieve it (if // it exists) - return tokenRelStore.get( - accountId, TokenID.newBuilder().tokenNum(tokenNumToLookup).build()); + return tokenRelStore.get(accountId, tokenIdToLookup); } /** @@ -234,21 +234,21 @@ private TokenRelation getInPriorityOrder( @NonNull private TokenRelPointerUpdateResult updatePointersSurroundingTargetTokenRel( @Nullable TokenRelation prevTokenRel, @Nullable final TokenRelation nextTokenRel) { - final var prevTokenRelTokenNum = prevTokenRel != null ? prevTokenRel.tokenNumber() : -1; - final var nextTokenRelTokenNum = nextTokenRel != null ? nextTokenRel.tokenNumber() : -1; + final var prevTokenRelTokenId = prevTokenRel != null ? prevTokenRel.tokenId() : null; + final var nextTokenRelTokenId = nextTokenRel != null ? nextTokenRel.tokenId() : null; // Create a copy of `prevTokenRel` with `prevTokenRel.nextToken()` now pointing to `nextTokenRel.tokenNumber()` // instead of `targetTokenRel.tokenNumber()`. If `prevTokenRel` is null, then no updated token relation will be // created, indicating that there is/was no previous token relation to be updated final TokenRelation newPrevTokenRel = prevTokenRel != null - ? prevTokenRel.copyBuilder().nextToken(nextTokenRelTokenNum).build() + ? prevTokenRel.copyBuilder().nextToken(nextTokenRelTokenId).build() : null; // Likewise, create a copy of nextTokenRel that points to `prevTokenRel` instead of `targetTokenRel`. Like // `prevTokenRel`, if `nextTokenRel` is null, then `newNextTokenRel` will be passed as null to the {@code Pair} // return value final TokenRelation newNextTokenRel = nextTokenRel != null - ? nextTokenRel.copyBuilder().previousToken(prevTokenRelTokenNum).build() + ? nextTokenRel.copyBuilder().previousToken(prevTokenRelTokenId).build() : null; return new TokenRelPointerUpdateResult(newPrevTokenRel, newNextTokenRel); @@ -259,62 +259,61 @@ private TokenRelPointerUpdateResult updatePointersSurroundingTargetTokenRel( * this method computes the expected new head token number for the account. * *

- * Note: if the given token rels are in an illegal state, a fallback value of -1 will be returned + * Note: if the given token rels are in an illegal state, a fallback null value will be + * returned instead of throwing an exception * - * @param currentHeadTokenNum the account's current head token number, i.e. the head token number that may change + * @param currentHeadTokenId the account's current head token id, i.e. the head token number that may change * @param account the account (object, not ID) that the token is related to * @param tokenRelsToDeleteByTokenId a map of token relations to delete, keyed by token ID for convenience of lookup * @return the new head token number for the account */ - private long calculateHeadTokenAfterDeletions( - final long currentHeadTokenNum, + private TokenID calculateHeadTokenAfterDeletions( + final TokenID currentHeadTokenId, @NonNull final Account account, - @NonNull final Map updatedTokenRels, - @NonNull final Map tokenRelsToDeleteByTokenId) { - final var accountId = BaseCryptoHandler.asAccount(account.accountNumber()); + @NonNull final Map updatedTokenRels, + @NonNull final Map tokenRelsToDeleteByTokenId) { + final var accountId = asAccount(account.accountNumber()); - // Calculate the new head token number by walking the linked token rels until we find a token rel that is not in + // Calculate the new head token id by walking the linked token rels until we find a token rel that is not in // the list of token rels to delete - var currentTokenNum = currentHeadTokenNum; + var currentTokenId = currentHeadTokenId; // We use a safety counter to prevent infinite loops in case of a bug var safetyCounter = 0; TokenRelation currentWalkedTokenRel; do { - currentWalkedTokenRel = updatedTokenRels.containsKey(currentTokenNum) - ? updatedTokenRels.get(currentTokenNum) - : tokenRelStore.get( - accountId, - TokenID.newBuilder().tokenNum(currentTokenNum).build()); + currentWalkedTokenRel = updatedTokenRels.containsKey(currentTokenId) + ? updatedTokenRels.get(currentTokenId) + : tokenRelStore.get(accountId, currentTokenId); if (currentWalkedTokenRel != null) { - if (!tokenRelsToDeleteByTokenId.containsKey(currentWalkedTokenRel.tokenNumber())) { + if (!tokenRelsToDeleteByTokenId.containsKey(currentWalkedTokenRel.tokenId())) { // we found the first existing token rel that is not in the list of token rels to delete break; } else { // we found a non-null token rel, but it is in the list of token rels to delete; we therefore // continue walking the linked token rels - currentTokenNum = currentWalkedTokenRel.nextToken(); + currentTokenId = currentWalkedTokenRel.nextToken(); } } else { // We reached the end of the linked token rel pointers chain; there is no token rel that will qualify as - // the new head token number. We therefore set the new head token number to -1 and exit the do-while + // the new head token number. 
We therefore set the new head token number to null and exit the do-while // loop (since `currentWalkedTokenRel` is null) - currentTokenNum = -1; + currentTokenId = null; } - // Default to a null pointer (value of -1) for infinite looping cases + // Default to a null pointer for infinite looping cases if (safetyCounter++ > account.numberAssociations()) { log.error( "Encountered token rels list that exceeds total token associations for account {}", account.accountNumber()); - return -1; + return null; } - } while (currentWalkedTokenRel != null); + } while (currentWalkedTokenRel != null && currentTokenId != null); - // At this point, `currentTokenNum` is either -1 (if we reached the end of the linked token rel pointers chain), - // zero if a token rel's previous or next pointer was incorrectly set to zero (e.g. initialized by default to - // zero and not set), or the token number of the first token rel that will NOT be deleted. In the first two - // cases, this value is the account's new head token number. Otherwise, return a fallback of number of -1 - return currentTokenNum > 0 ? currentTokenNum : -1; + // At this point, `currentTokenNum` is either null (if we reached the end of the linked token rel pointers + // chain), zero if a token rel's previous or next pointer was incorrectly set to zero (e.g. initialized by + // default to zero and not set), or the token number of the first token rel that will NOT be deleted. In the + // first two cases, this value is the account's new head token number. Otherwise, return null + return currentTokenId != null && currentTokenId.tokenNum() > 0 ? currentTokenId : null; } /** @@ -331,7 +330,7 @@ private long calculateHeadTokenAfterDeletions( * relations in {@code tokenRelsToDelete} */ public record TokenRelsRemovalResult( - @Nullable Long updatedHeadTokenId, @NonNull List updatedTokenRelsStillInChain) {} + @Nullable TokenID updatedHeadTokenId, @NonNull List updatedTokenRelsStillInChain) {} private record TokenRelPointerUpdateResult( @Nullable TokenRelation updatedPrevTokenRel, @Nullable TokenRelation updatedNextTokenRel) {} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java index 1bd864cb28fb..42c631ddf1c9 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java @@ -119,7 +119,7 @@ public static boolean isValidOwner( if (nft.hasOwnerId()) { return nft.ownerId().equals(ownerID); } else { - return ownerID.accountNum() == token.treasuryAccountNumber(); + return ownerID.equals(token.treasuryAccountId()); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java index 0dc29d5aa1c2..333ce439cab2 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java @@ -22,7 +22,6 @@ import static 
java.util.Collections.emptyList; import com.hedera.hapi.node.base.AccountID; -import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.base.TokenSupplyType; import com.hedera.hapi.node.base.TokenType; import com.hedera.hapi.node.state.token.Account; @@ -205,7 +204,7 @@ private void validateTokenBasics( final ReadableTokenRelationStore tokenRelStore) { final var ownerId = AccountID.newBuilder().accountNum(owner.accountNumber()).build(); - final var tokenId = TokenID.newBuilder().tokenNum(token.tokenNumber()).build(); + final var tokenId = token.tokenId(); // ONLY reject self-approval for NFT's; else allow to match OZ ERC-20 validateFalse( !token.tokenType().equals(TokenType.FUNGIBLE_COMMON) diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CryptoCreateValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CryptoCreateValidator.java index 6a3c5d0cdaac..dfd64f96ae4b 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CryptoCreateValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CryptoCreateValidator.java @@ -85,7 +85,7 @@ private void validateKeyAndAliasProvidedCase( attributeValidator.validateKey(op.keyOrThrow()); validateTrue(op.alias().length() == EVM_ADDRESS_SIZE, INVALID_ALIAS_KEY); validateFalse(isMirror(op.alias()), INVALID_ALIAS_KEY); - validateTrue(readableAccountStore.getAccountIDByAlias(op.alias().toString()) == null, ALIAS_ALREADY_ASSIGNED); + validateTrue(readableAccountStore.getAccountIDByAlias(op.alias()) == null, ALIAS_ALREADY_ASSIGNED); } /** check if the number of auto associations is too many diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CryptoTransferValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CryptoTransferValidator.java new file mode 100644 index 000000000000..10a2d80aa7cb --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CryptoTransferValidator.java @@ -0,0 +1,176 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.validators; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.BATCH_SIZE_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.EMPTY_TOKEN_TRANSFER_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_NFT_SERIAL_NUMBER; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSFER_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ID_REPEATED_IN_TOKEN_LIST; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TRANSFER_LIST_SIZE_LIMIT_EXCEEDED; +import static com.hedera.node.app.spi.validation.Validations.validateAccountID; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; +import static java.util.Collections.emptyList; + +import com.hedera.hapi.node.base.AccountAmount; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.NftTransfer; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.data.HederaConfig; +import com.hedera.node.config.data.LedgerConfig; +import com.hedera.node.config.data.TokensConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.HashSet; +import java.util.List; +import javax.inject.Inject; +import javax.inject.Singleton; + +@Singleton +public class CryptoTransferValidator { + + @Inject + public CryptoTransferValidator() { + // For Dagger injection + } + + public void pureChecks(@NonNull final CryptoTransferTransactionBody op) throws PreCheckException { + final var acctAmounts = op.transfersOrElse(TransferList.DEFAULT).accountAmountsOrElse(emptyList()); + final var uniqueAcctIds = new HashSet(); + long netBalance = 0; + for (final AccountAmount acctAmount : acctAmounts) { + validateTruePreCheck(acctAmount.hasAccountID(), INVALID_ACCOUNT_ID); + final var acctId = validateAccountID(acctAmount.accountIDOrThrow()); + uniqueAcctIds.add(acctId); + netBalance += acctAmount.amount(); + } + validateTruePreCheck(netBalance == 0, INVALID_ACCOUNT_AMOUNTS); + validateFalsePreCheck(uniqueAcctIds.size() < acctAmounts.size(), ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS); + + final var tokenTransfers = op.tokenTransfersOrElse(emptyList()); + for (final TokenTransferList tokenTransfer : tokenTransfers) { + final var tokenID = tokenTransfer.token(); + validateTruePreCheck(tokenID != null && !tokenID.equals(TokenID.DEFAULT), INVALID_TOKEN_ID); + + // Validate the fungible transfers + final var uniqueTokenAcctIds = new HashSet(); + final var fungibleTransfers = 
tokenTransfer.transfersOrElse(emptyList()); + long netTokenBalance = 0; + boolean nonZeroFungibleValueFound = false; + for (final AccountAmount acctAmount : fungibleTransfers) { + validateTruePreCheck(acctAmount.hasAccountID(), INVALID_TRANSFER_ACCOUNT_ID); + uniqueTokenAcctIds.add(acctAmount.accountIDOrThrow()); + netTokenBalance += acctAmount.amount(); + if (!nonZeroFungibleValueFound && acctAmount.amount() != 0) { + nonZeroFungibleValueFound = true; + } + } + validateFalsePreCheck( + uniqueTokenAcctIds.size() < fungibleTransfers.size(), ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS); + validateTruePreCheck(netTokenBalance == 0, TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN); + + // Validate the nft transfers + final var nftTransfers = tokenTransfer.nftTransfersOrElse(emptyList()); + final var nftIds = new HashSet(); + for (final NftTransfer nftTransfer : nftTransfers) { + validateTruePreCheck(nftTransfer.serialNumber() > 0, INVALID_TOKEN_NFT_SERIAL_NUMBER); + validateTruePreCheck(nftTransfer.hasSenderAccountID(), INVALID_TRANSFER_ACCOUNT_ID); + validateTruePreCheck(nftTransfer.hasReceiverAccountID(), INVALID_TRANSFER_ACCOUNT_ID); + + nftIds.add(nftTransfer.serialNumber()); + } + validateFalsePreCheck(nftIds.size() < nftTransfers.size(), TOKEN_ID_REPEATED_IN_TOKEN_LIST); + + // Verify that one and only one of the two types of transfers (fungible or non-fungible) is present + validateFalsePreCheck(!nonZeroFungibleValueFound && nftIds.isEmpty(), EMPTY_TOKEN_TRANSFER_ACCOUNT_AMOUNTS); + validateFalsePreCheck(nonZeroFungibleValueFound && !nftIds.isEmpty(), INVALID_ACCOUNT_AMOUNTS); + } + } + + public void validateSemantics( + @NonNull final CryptoTransferTransactionBody op, + @NonNull final LedgerConfig ledgerConfig, + @NonNull final HederaConfig hederaConfig, + @NonNull final TokensConfig tokensConfig) { + final var transfers = op.transfersOrElse(TransferList.DEFAULT); + + // Validate that there aren't too many hbar transfers + final var hbarTransfers = transfers.accountAmountsOrElse(emptyList()); + validateTrue(hbarTransfers.size() < ledgerConfig.transfersMaxLen(), TRANSFER_LIST_SIZE_LIMIT_EXCEEDED); + + // Validate that allowances are enabled, or that no hbar transfers are an allowance transfer + final var allowancesEnabled = hederaConfig.allowancesIsEnabled(); + validateTrue(allowancesEnabled || !hasAllowance(hbarTransfers), NOT_SUPPORTED); + + // The loop below will validate the counts for token transfers (both fungible and non-fungible) + final var tokenTransfers = op.tokenTransfersOrElse(emptyList()); + var totalFungibleTransfers = 0; + var totalNftTransfers = 0; + final var nftsEnabled = tokensConfig.nftsAreEnabled(); + for (final TokenTransferList tokenTransfer : tokenTransfers) { + // Validate the fungible token transfer(s) (if present) + final var fungibleTransfers = tokenTransfer.transfersOrElse(emptyList()); + validateTrue(allowancesEnabled || !hasAllowance(fungibleTransfers), NOT_SUPPORTED); + totalFungibleTransfers += fungibleTransfers.size(); + + // Validate the nft transfer(s) (if present) + final var nftTransfers = tokenTransfer.nftTransfersOrElse(emptyList()); + validateTrue(nftsEnabled || nftTransfers.isEmpty(), NOT_SUPPORTED); + validateTrue(allowancesEnabled || !hasNftAllowance(nftTransfers), NOT_SUPPORTED); + totalNftTransfers += nftTransfers.size(); + + // Verify that the current total number of (counted) fungible transfers does not exceed the limit + validateTrue( + totalFungibleTransfers < ledgerConfig.tokenTransfersMaxLen(), + TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED); + // Verify that 
the current total number of (counted) nft transfers does not exceed the limit + validateTrue(totalNftTransfers < ledgerConfig.nftTransfersMaxLen(), BATCH_SIZE_LIMIT_EXCEEDED); + } + } + + private boolean hasAllowance(@NonNull final List transfers) { + for (final AccountAmount transfer : transfers) { + if (transfer.isApproval()) { + return true; + } + } + + return false; + } + + private boolean hasNftAllowance(@NonNull final List nftTransfers) { + for (final NftTransfer nftTransfer : nftTransfers) { + if (nftTransfer.isApproval()) { + return true; + } + } + + return false; + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java index e8413f6cddca..5a1103a7589b 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java @@ -136,8 +136,7 @@ public void validateForFeeScheduleUpdate( case FRACTIONAL_FEE -> { // fractional fee can be only applied to fungible common tokens validateTrue(isFungibleCommon(tokenType), CUSTOM_FRACTIONAL_FEE_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON); - final var tokenId = - TokenID.newBuilder().tokenNum(token.tokenNumber()).build(); + final var tokenId = token.tokenId(); final var relation = tokenRelationStore.get(collectorId, tokenId); validateTrue(relation != null, TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR); } @@ -214,10 +213,7 @@ private void validateFixedFeeForCreation( if (fixedFee.denominatingTokenIdOrThrow().tokenNum() == 0L) { validateTrue(isFungibleCommon(tokenType), CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON); final var copy = fee.copyBuilder(); - copy.fixedFee(fixedFee.copyBuilder() - .denominatingTokenId(TokenID.newBuilder() - .tokenNum(createdToken.tokenNumber()) - .build())); + copy.fixedFee(fixedFee.copyBuilder().denominatingTokenId(createdToken.tokenId())); feesWithCollectorsToAutoAssociate.add(copy.build()); } else { validateExplicitTokenDenomination( diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java index ef3aef150ab4..b161482cc250 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java @@ -103,7 +103,7 @@ private void validateNftDeleteAllowances( AccountID.newBuilder() .accountNum(effectiveOwner.accountNumber()) .build(), - TokenID.newBuilder().tokenNum(token.tokenNumber()).build()); + token.tokenId()); validateTrue(relation != null, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); validateDeleteSerialNums(serialNums, tokenId, nftStore); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java index b41a14b29015..50e5b3f52a68 100644 --- 
a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java @@ -24,7 +24,7 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.node.app.service.token.ReadableAccountStore; -import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.config.data.StakingConfig; import edu.umd.cs.findbugs.annotations.NonNull; @@ -57,7 +57,7 @@ public void validateStakedId( @Nullable final Long stakedNodeIdInOp, @NonNull ReadableAccountStore accountStore, @NonNull final HandleContext context, - @NonNull final NodeInfo nodeInfo) { + @NonNull final NetworkInfo networkInfo) { final var hasStakingId = stakedAccountIdInOp != null || stakedNodeIdInOp != null; final var stakingConfig = context.configuration().getConfigData(StakingConfig.class); // If staking is not enabled, then can't update staked id @@ -73,7 +73,7 @@ public void validateStakedId( if (stakedIdKind.equals("STAKED_ACCOUNT_ID")) { validateTrue(accountStore.getAccountById(requireNonNull(stakedAccountIdInOp)) != null, INVALID_STAKING_ID); } else if (stakedIdKind.equals("STAKED_NODE_ID")) { - validateTrue(nodeInfo.isValidId((requireNonNull(stakedNodeIdInOp))), INVALID_STAKING_ID); + validateTrue(networkInfo.nodeInfo(requireNonNull(stakedNodeIdInOp)) != null, INVALID_STAKING_ID); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java index affd6d83a282..b1e473d8d955 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java @@ -31,7 +31,6 @@ import static com.hedera.hapi.node.base.TokenType.FUNGIBLE_COMMON; import static com.hedera.hapi.node.base.TokenType.NON_FUNGIBLE_UNIQUE; import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; -import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; @@ -191,7 +190,7 @@ public void validateAssociation( && account.numberAssociations() + 1 > tokensConfig.maxPerAccount(), TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED); validateTrue( - tokenRelStore.get(asAccount(account.accountNumber()), asToken(token.tokenNumber())) == null, + tokenRelStore.get(asAccount(account.accountNumber()), token.tokenId()) == null, TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java index b3ee80811549..742619b8974a 100644 --- 
a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java @@ -18,12 +18,12 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_IMMUTABLE; -import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.getIfUsable; import static com.hedera.node.app.spi.key.KeyUtils.isEmpty; import static com.hedera.node.app.spi.validation.ExpiryMeta.NA; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.token.TokenUpdateTransactionBody; import com.hedera.node.app.service.token.ReadableAccountStore; @@ -82,25 +82,26 @@ public ValidationResult validateSemantics( final var resolvedExpiryMeta = resolveExpiry(token, op, context.expiryValidator()); validateNewAndExistingAutoRenewAccount( - resolvedExpiryMeta.autoRenewNum(), - token.autoRenewAccountNumber(), + AccountID.newBuilder() + .accountNum(resolvedExpiryMeta.autoRenewNum()) + .build(), + token.autoRenewAccountId(), readableAccountStore, context.expiryValidator()); return new ValidationResult(token, resolvedExpiryMeta); } private void validateNewAndExistingAutoRenewAccount( - final long resolvedAutoRenewNum, - final long existingAutoRenewNum, + final AccountID resolvedAutoRenewId, + final AccountID existingAutoRenewId, final ReadableAccountStore readableAccountStore, final ExpiryValidator expiryValidator) { // Get resolved auto-renewal account - getIfUsable(asAccount(resolvedAutoRenewNum), readableAccountStore, expiryValidator, INVALID_AUTORENEW_ACCOUNT); + getIfUsable(resolvedAutoRenewId, readableAccountStore, expiryValidator, INVALID_AUTORENEW_ACCOUNT); // If token has an existing auto-renewal account, validate its expiration // FUTURE : Not sure why we should validate existing auto-renew account. Retained as in mono-service - if (existingAutoRenewNum != 0) { - getIfUsable( - asAccount(existingAutoRenewNum), readableAccountStore, expiryValidator, INVALID_AUTORENEW_ACCOUNT); + if (!resolvedAutoRenewId.equals(AccountID.DEFAULT)) { + getIfUsable(existingAutoRenewId, readableAccountStore, expiryValidator, INVALID_AUTORENEW_ACCOUNT); } } @@ -108,8 +109,10 @@ private ExpiryMeta resolveExpiry( @NonNull final Token token, @NonNull final TokenUpdateTransactionBody op, @NonNull final ExpiryValidator expiryValidator) { - final var givenExpiryMeta = - new ExpiryMeta(token.expiry(), token.autoRenewSecs(), token.autoRenewAccountNumber()); + final var givenExpiryMeta = new ExpiryMeta( + token.expiry(), + token.autoRenewSecs(), + token.autoRenewAccountId().accountNum()); final var updateExpiryMeta = new ExpiryMeta( op.hasExpiry() ? op.expiryOrThrow().seconds() : NA, op.hasAutoRenewPeriod() ? 
op.autoRenewPeriodOrThrow().seconds() : NA, diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableAccountStoreImplTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableAccountStoreImplTest.java index 59bd881c2ee8..be782b42c1ff 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableAccountStoreImplTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableAccountStoreImplTest.java @@ -22,7 +22,6 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.state.token.Account; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; import com.hedera.node.app.service.token.impl.ReadableAccountStoreImpl; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoHandlerTestBase; import com.hedera.pbj.runtime.io.buffer.Bytes; @@ -46,7 +45,7 @@ public void setUp() { readableAccounts = emptyReadableAccountStateBuilder().value(id, account).build(); given(readableStates.get(ACCOUNTS)).willReturn(readableAccounts); readableAliases = readableAliasState(); - given(readableStates.get(ALIASES)).willReturn(readableAliases); + given(readableStates.get(ALIASES)).willReturn(readableAliases); subject = new ReadableAccountStoreImpl(readableStates); } @@ -159,9 +158,9 @@ void getsNullIfMissingAccount() { @Test void getAccountIDByAlias() { - final var accountId = subject.getAccountIDByAlias(alias.toString()); + final var accountId = subject.getAccountIDByAlias(alias.alias()); assertThat(accountId).isEqualTo(id); - final var accountId2 = subject.getAccountIDByAlias("test"); + final var accountId2 = subject.getAccountIDByAlias(Bytes.wrap("test")); assertThat(accountId2).isNull(); } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java index 28fa59cf5e5a..62f4f602f742 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java @@ -69,8 +69,8 @@ void testNullConstructorArgs() { @Test void testGet() { final var tokenRelation = TokenRelation.newBuilder() - .tokenNumber(TOKEN_10) - .accountNumber(ACCOUNT_20) + .tokenId(TOKEN_10_ID) + .accountId(ACCOUNT_20_ID) .build(); given(tokenRelState.get(notNull())).willReturn(tokenRelation); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenStoreImplTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenStoreImplTest.java index faa2fb1a14c5..97642dce12aa 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenStoreImplTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenStoreImplTest.java @@ -20,7 +20,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static 
org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.BDDMockito.given; @@ -30,7 +29,6 @@ import com.hedera.node.app.service.mono.state.submerkle.EntityId; import com.hedera.node.app.service.mono.state.submerkle.FcCustomFee; import com.hedera.node.app.service.mono.state.submerkle.FixedFeeSpec; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; import com.hedera.node.app.service.token.impl.test.handlers.util.TokenHandlerTestBase; import com.hedera.node.app.spi.state.ReadableKVState; @@ -44,10 +42,8 @@ @ExtendWith(MockitoExtension.class) class ReadableTokenStoreImplTest extends TokenHandlerTestBase { - private final EntityNum tokenEntityNum = EntityNum.fromLong(2000); - @Mock - private ReadableKVState tokens; + private ReadableKVState tokens; private static final String TOKENS = "TOKENS"; private final TokenID tokenId = TokenID.newBuilder().tokenNum(2000).build(); @@ -66,13 +62,13 @@ public void setUp() { } private void initializeToken() { - given(states.get(TOKENS)).willReturn(tokens); + given(states.get(TOKENS)).willReturn(tokens); token = createToken(); } @Test void getsMerkleTokenIfTokenIdPresent() { - given(tokens.get(tokenEntityNum)).willReturn(token); + given(tokens.get(tokenId)).willReturn(token); final var meta = subject.getTokenMeta(tokenId); assertEquals(adminKey, meta.adminKey()); @@ -83,12 +79,12 @@ void getsMerkleTokenIfTokenIdPresent() { assertEquals(feeScheduleKey, meta.feeScheduleKey()); assertEquals(pauseKey, meta.pauseKey()); assertTrue(meta.hasRoyaltyWithFallback()); - assertEquals(treasury.accountNum(), meta.treasuryNum()); + assertEquals(treasury, meta.treasuryAccountId()); } @Test void getsNullKeyIfMissingAccount() throws PreCheckException { - given(tokens.get(tokenEntityNum)).willReturn(null); + given(tokens.get(tokenId)).willReturn(null); assertNull(subject.getTokenMeta(tokenId)); } @@ -99,12 +95,12 @@ void classifiesRoyaltyWithFallback() throws PreCheckException { copy.customFees(PbjConverter.fromFcCustomFee( FcCustomFee.royaltyFee(1, 2, new FixedFeeSpec(1, null), new EntityId(1, 2, 5), false))); - given(tokens.get(tokenEntityNum)).willReturn(copy.build()); + given(tokens.get(tokenId)).willReturn(copy.build()); final var meta = subject.getTokenMeta(tokenId); assertTrue(meta.hasRoyaltyWithFallback()); - assertSame(treasury.accountNum(), meta.treasuryNum()); + assertEquals(treasury, meta.treasuryAccountId()); } @Test @@ -113,11 +109,11 @@ void classifiesRoyaltyWithNoFallback() throws PreCheckException { copy.tokenType(NON_FUNGIBLE_UNIQUE); copy.customFees(PbjConverter.fromFcCustomFee(FcCustomFee.royaltyFee(1, 2, null, new EntityId(1, 2, 5), false))); - given(tokens.get(tokenEntityNum)).willReturn(copy.build()); + given(tokens.get(tokenId)).willReturn(copy.build()); final var meta = subject.getTokenMeta(tokenId); assertFalse(meta.hasRoyaltyWithFallback()); - assertSame(treasury.accountNum(), meta.treasuryNum()); + assertEquals(treasury, meta.treasuryAccountId()); } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java index 72a0fbb8f1e7..e890976fff96 100644 --- 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java @@ -16,7 +16,7 @@ package com.hedera.node.app.service.token.impl.test; -import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.BDDMockito.given; @@ -58,8 +58,10 @@ void constructorCreatesTokenState() { @Test void getReturnsImmutableToken() { - final var id = - UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); + final var id = UniqueTokenId.newBuilder() + .tokenId(fungibleTokenId) + .serialNumber(1) + .build(); final var nft = givenNft(id); writableNftStore.put(nft); @@ -70,8 +72,10 @@ void getReturnsImmutableToken() { @Test void getForModifyReturnsImmutableToken() { - final var id = - UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); + final var id = UniqueTokenId.newBuilder() + .tokenId(fungibleTokenId) + .serialNumber(1) + .build(); final var nft = givenNft(id); writableNftStore.put(nft); @@ -84,8 +88,10 @@ void getForModifyReturnsImmutableToken() { @Test void putsTokenChangesToStateInModifications() { - final var id = - UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); + final var id = UniqueTokenId.newBuilder() + .tokenId(fungibleTokenId) + .serialNumber(1) + .build(); final var nft = givenNft(id); assertFalse(writableNftState.contains(id)); @@ -100,8 +106,10 @@ void putsTokenChangesToStateInModifications() { @Test void getsSizeOfState() { - final var id = - UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); + final var id = UniqueTokenId.newBuilder() + .tokenId(fungibleTokenId) + .serialNumber(1) + .build(); final var nft = givenNft(id); assertEquals(0, writableNftStore.sizeOfState()); @@ -115,9 +123,12 @@ void getsSizeOfState() { @Test void removesByUniqueTokenId() { // Set up the NFT state with an existing NFT - final var nftToRemove = - UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); + final var ownerId = AccountID.newBuilder().accountNum(12345).build(); + final var nftToRemove = UniqueTokenId.newBuilder() + .tokenId(fungibleTokenId) + .serialNumber(1) + .build(); writableNftState = emptyWritableNftStateBuilder() .value( nftToRemove, @@ -137,9 +148,11 @@ void removesByUniqueTokenId() { @Test void removesByTokenIdAndSerialNum() { // Set up the NFT state with an existing NFT - final var nftToRemove = - UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); - final var ownerId = AccountID.newBuilder().accountNum(12345).build(); + final var nftToRemove = UniqueTokenId.newBuilder() + .tokenId(fungibleTokenId) + .serialNumber(1) + .build(); + final var ownerId = asAccount(12345); writableNftState = emptyWritableNftStateBuilder() .value( nftToRemove, @@ -150,7 +163,7 @@ void removesByTokenIdAndSerialNum() { writableNftStore = new WritableNftStore(writableStates); assertNotNull(writableNftStore.get(nftToRemove)); - writableNftStore.remove(asToken(nftToRemove.tokenTypeNumber()), nftToRemove.serialNumber()); + writableNftStore.remove(nftToRemove.tokenId(), nftToRemove.serialNumber()); // Assert the NFT is removed assertNull(writableNftStore.get(nftToRemove)); diff --git 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java index cf68bcddd0b9..eed45a708af2 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java @@ -16,6 +16,8 @@ package com.hedera.node.app.service.token.impl.test; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.notNull; import static org.mockito.BDDMockito.given; @@ -24,8 +26,8 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.TokenRelation; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.impl.TokenServiceImpl; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.spi.state.WritableKVStateBase; @@ -51,13 +53,13 @@ class WritableTokenRelationStoreTest { private WritableStates states; @Mock - private WritableKVStateBase tokenRelState; + private WritableKVStateBase tokenRelState; private WritableTokenRelationStore subject; @BeforeEach void setUp() { - given(states.get(TokenServiceImpl.TOKEN_RELS_KEY)) + given(states.get(TokenServiceImpl.TOKEN_RELS_KEY)) .willReturn(tokenRelState); subject = new WritableTokenRelationStore(states); @@ -72,12 +74,18 @@ void testNullConstructorArgs() { @Test void testPut() { final var expectedTokenRel = TokenRelation.newBuilder() - .accountNumber(ACCOUNT_20) - .tokenNumber(TOKEN_10) + .accountId(ACCOUNT_20_ID) + .tokenId(TOKEN_10_ID) .build(); subject.put(expectedTokenRel); - verify(tokenRelState).put(EntityNumPair.fromLongs(ACCOUNT_20, TOKEN_10), expectedTokenRel); + verify(tokenRelState) + .put( + EntityIDPair.newBuilder() + .accountId(ACCOUNT_20_ID) + .tokenId(TOKEN_10_ID) + .build(), + expectedTokenRel); } @Test @@ -89,10 +97,14 @@ void testPutNull() { @Test void testGet() { final var tokenRelation = TokenRelation.newBuilder() - .tokenNumber(TOKEN_10) - .accountNumber(ACCOUNT_20) + .tokenId(TOKEN_10_ID) + .accountId(ACCOUNT_20_ID) .build(); - given(tokenRelState.get(EntityNumPair.fromLongs(ACCOUNT_20, TOKEN_10))).willReturn(tokenRelation); + given(tokenRelState.get(EntityIDPair.newBuilder() + .accountId(ACCOUNT_20_ID) + .tokenId(TOKEN_10_ID) + .build())) + .willReturn(tokenRelation); final var result = subject.get(ACCOUNT_20_ID, TOKEN_10_ID); Assertions.assertThat(result).isEqualTo(tokenRelation); @@ -110,7 +122,10 @@ void testGetEmpty() { @Test void testGetForModify() { TokenRelation tokenRelation = mock(TokenRelation.class); - given(tokenRelState.getForModify(EntityNumPair.fromLongs(ACCOUNT_20, TOKEN_10))) + given(tokenRelState.getForModify(EntityIDPair.newBuilder() + .accountId(ACCOUNT_20_ID) + .tokenId(TOKEN_10_ID) + .build())) .willReturn(tokenRelation); final var result = subject.getForModify(ACCOUNT_20_ID, TOKEN_10_ID); @@ -119,7 +134,10 @@ void testGetForModify() { @Test void testGetForModifyEmpty() { - 
given(tokenRelState.getForModify(EntityNumPair.fromLongs(-2L, TOKEN_10))) + given(tokenRelState.getForModify(EntityIDPair.newBuilder() + .accountId(asAccount(-2L)) + .tokenId(TOKEN_10_ID) + .build())) .willReturn(null); final var result = @@ -138,7 +156,15 @@ void testSizeOfState() { @Test void testModifiedTokens() { - final var modifiedKeys = Set.of(EntityNumPair.fromLongs(ACCOUNT_20, TOKEN_10), EntityNumPair.fromLongs(1L, 2L)); + final var modifiedKeys = Set.of( + EntityIDPair.newBuilder() + .accountId(ACCOUNT_20_ID) + .tokenId(TOKEN_10_ID) + .build(), + EntityIDPair.newBuilder() + .accountId(asAccount(1L)) + .tokenId(asToken(2L)) + .build()); given(tokenRelState.modifiedKeys()).willReturn(modifiedKeys); final var result = subject.modifiedTokens(); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenStoreTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenStoreTest.java index 3f53b4de30cb..5a24e741684b 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenStoreTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenStoreTest.java @@ -64,7 +64,7 @@ void getForModifyReturnsImmutableToken() { writableTokenStore.put(token); - final var maybeReadToken = writableTokenStore.getForModify(tokenEntityNum.longValue()); + final var maybeReadToken = writableTokenStore.getForModify(tokenId); assertTrue(maybeReadToken.isPresent()); final var readToken = maybeReadToken.get(); @@ -74,13 +74,13 @@ void getForModifyReturnsImmutableToken() { @Test void putsTokenChangesToStateInModifications() { token = createToken(); - assertFalse(writableTokenState.contains(tokenEntityNum)); + assertFalse(writableTokenState.contains(tokenId)); // put, keeps the token in the modifications writableTokenStore.put(token); - assertTrue(writableTokenState.contains(tokenEntityNum)); - final var writtenToken = writableTokenState.get(tokenEntityNum); + assertTrue(writableTokenState.contains(tokenId)); + final var writtenToken = writableTokenState.get(tokenId); assertEquals(token, writtenToken); } @@ -92,6 +92,6 @@ void getsSizeOfState() { writableTokenStore.put(token); assertEquals(1, writableTokenStore.sizeOfState()); - assertEquals(Set.of(tokenEntityNum), writableTokenStore.modifiedTokens()); + assertEquals(Set.of(tokenId), writableTokenStore.modifiedTokens()); } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoCreateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoCreateHandlerTest.java index d6de107cdde2..9e35ee1f5c8f 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoCreateHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoCreateHandlerTest.java @@ -30,6 +30,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.MEMO_TOO_LONG; import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static com.hedera.hapi.node.base.ResponseCodeEnum.PROXY_ACCOUNT_ID_FIELD_IS_DEPRECATED; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static org.assertj.core.api.Assertions.assertThat; 
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -55,7 +56,6 @@ import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.mono.context.properties.GlobalDynamicProperties; import com.hedera.node.app.service.mono.context.properties.PropertySource; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.handlers.CryptoCreateHandler; import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; @@ -63,7 +63,7 @@ import com.hedera.node.app.service.token.impl.validators.CryptoCreateValidator; import com.hedera.node.app.service.token.impl.validators.StakingValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; -import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.validation.AttributeValidator; import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.spi.workflows.HandleContext; @@ -72,6 +72,7 @@ import com.hedera.node.app.workflows.handle.validation.StandardizedAttributeValidator; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; +import com.swirlds.common.utility.CommonUtils; import com.swirlds.config.api.Configuration; import java.util.function.LongSupplier; import org.junit.jupiter.api.BeforeEach; @@ -103,7 +104,7 @@ class CryptoCreateHandlerTest extends CryptoHandlerTestBase { private CryptoCreateRecordBuilder recordBuilder; @Mock - private NodeInfo nodeInfo; + private NetworkInfo networkInfo; @Mock(strictness = LENIENT) private ExpiryValidator expiryValidator; @@ -135,7 +136,7 @@ public void setUp() { cryptoCreateValidator = new CryptoCreateValidator(); stakingValidator = new StakingValidator(); - subject = new CryptoCreateHandler(cryptoCreateValidator, stakingValidator, nodeInfo); + subject = new CryptoCreateHandler(cryptoCreateValidator, stakingValidator, networkInfo); } @Test @@ -454,6 +455,7 @@ void handleFailsWhenPayerInvalid() { @Test @DisplayName("handle commits when alias is mentioned in the transaction") void handleCommitsAnyAlias() { + final byte[] evmAddress = CommonUtils.unhex("6aeb3773ea468a814d954e6dec795bfee7d76e26"); txn = new CryptoCreateBuilder() .withAlias(Bytes.wrap(evmAddress)) .withStakedAccountId(3) @@ -583,9 +585,9 @@ void validateAliasSigned() { setupConfig(); setupExpiryValidator(); final var writableAliases = emptyWritableAliasStateBuilder() - .value(Bytes.wrap(evmAddress).toString(), new EntityNumValue(accountNum)) + .value(Bytes.wrap(evmAddress), asAccount(accountNum)) .build(); - given(writableStates.get(ALIASES)).willReturn(writableAliases); + given(writableStates.get(ALIASES)).willReturn(writableAliases); writableStore = new WritableAccountStore(writableStates); when(handleContext.writableStore(WritableAccountStore.class)).thenReturn(writableStore); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java index 150962a8d3e1..e93aa5b40e4f 100644 --- 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java @@ -18,6 +18,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.EMPTY_ALLOWANCES; import static com.hedera.hapi.node.base.ResponseCodeEnum.SENDER_DOES_NOT_OWN_NFT_SERIAL_NO; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -159,8 +160,10 @@ void checksEmptyAllowancesInTxn() { @Test void failsDeleteAllowancesOnInvalidTreasury() { - writableTokenStore.put( - nonFungibleToken.copyBuilder().treasuryAccountNumber(200L).build()); + writableTokenStore.put(nonFungibleToken + .copyBuilder() + .treasuryAccountId(asAccount(200L)) + .build()); writableNftStore.put(nftSl1.copyBuilder().spenderId(spenderId).build()); writableNftStore.put(nftSl2.copyBuilder().spenderId(spenderId).build()); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountBalanceHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountBalanceHandlerTest.java index 72907930221b..0d8f026003c6 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountBalanceHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountBalanceHandlerTest.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token.impl.test.handlers; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.node.app.service.token.impl.test.handlers.util.StateBuilderUtil.TOKENS; import static com.hedera.node.app.service.token.impl.test.handlers.util.StateBuilderUtil.TOKEN_RELS; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; @@ -35,6 +36,7 @@ import com.hedera.hapi.node.base.ResponseHeader; import com.hedera.hapi.node.base.TokenBalance; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; @@ -42,8 +44,6 @@ import com.hedera.hapi.node.token.CryptoGetAccountBalanceResponse; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.ReadableTokenStore; @@ -267,27 +267,32 @@ void getsResponseIfOkResponse() { ReadableAccountStore ReadableAccountStore = new ReadableAccountStoreImpl(readableStates1); given(token1.decimals()).willReturn(100); - final var readableToken = MapReadableKVState.builder(TOKENS) - .value(EntityNum.fromLong(3L), token1) + final var readableToken = 
MapReadableKVState.builder(TOKENS) + .value(asToken(3L), token1) .build(); - given(readableStates2.get(TOKENS)).willReturn(readableToken); + given(readableStates2.get(TOKENS)).willReturn(readableToken); final var readableTokenStore = new ReadableTokenStoreImpl(readableStates2); final var tokenRelation = TokenRelation.newBuilder() - .tokenNumber(3L) - .accountNumber(accountNum) + .tokenId(asToken(3L)) + .accountId(id) .balance(1000L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(4L) - .previousToken(2L) + .nextToken(asToken(4L)) + .previousToken(asToken(2L)) .build(); - final var readableTokenRel = MapReadableKVState.builder(TOKEN_RELS) - .value(EntityNumPair.fromLongs(3L, accountNum), tokenRelation) + final var readableTokenRel = MapReadableKVState.builder(TOKEN_RELS) + .value( + EntityIDPair.newBuilder() + .accountId(id) + .tokenId(asToken(3L)) + .build(), + tokenRelation) .build(); - given(readableStates3.get(TOKEN_RELS)).willReturn(readableTokenRel); + given(readableStates3.get(TOKEN_RELS)).willReturn(readableTokenRel); final var readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates3); final var query = createGetAccountBalanceQuery(accountNum); @@ -325,53 +330,68 @@ void checkConfigmaxRelsPerInfoQuery() { given(token1.decimals()).willReturn(100); given(token2.decimals()).willReturn(50); - final var readableToken = MapReadableKVState.builder(TOKENS) - .value(EntityNum.fromLong(3L), token1) - .value(EntityNum.fromLong(4L), token2) - .value(EntityNum.fromLong(5L), token3) + final var readableToken = MapReadableKVState.builder(TOKENS) + .value(asToken(3L), token1) + .value(asToken(4L), token2) + .value(asToken(5L), token3) .build(); - given(readableStates2.get(TOKENS)).willReturn(readableToken); + given(readableStates2.get(TOKENS)).willReturn(readableToken); final var readableTokenStore = new ReadableTokenStoreImpl(readableStates2); final var tokenRelation1 = TokenRelation.newBuilder() - .tokenNumber(3L) - .accountNumber(accountNum) + .tokenId(asToken(3L)) + .accountId(id) .balance(1000L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(4L) - .previousToken(2L) + .nextToken(asToken(4L)) + .previousToken(asToken(2L)) .build(); final var tokenRelation2 = TokenRelation.newBuilder() - .tokenNumber(4L) - .accountNumber(accountNum) + .tokenId(asToken(4L)) + .accountId(id) .balance(100L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(5L) - .previousToken(3L) + .nextToken(asToken(5L)) + .previousToken(asToken(3L)) .build(); final var tokenRelation3 = TokenRelation.newBuilder() - .tokenNumber(5L) - .accountNumber(accountNum) + .tokenId(asToken(5L)) + .accountId(id) .balance(10L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(6L) - .previousToken(4L) + .nextToken(asToken(6L)) + .previousToken(asToken(4L)) .build(); - final var readableTokenRel = MapReadableKVState.builder(TOKEN_RELS) - .value(EntityNumPair.fromLongs(accountNum, 3L), tokenRelation1) - .value(EntityNumPair.fromLongs(accountNum, 4L), tokenRelation2) - .value(EntityNumPair.fromLongs(accountNum, 5L), tokenRelation3) + final var readableTokenRel = MapReadableKVState.builder(TOKEN_RELS) + .value( + EntityIDPair.newBuilder() + .accountId(id) + .tokenId(asToken(3L)) + .build(), + tokenRelation1) + .value( + EntityIDPair.newBuilder() + .accountId(id) + .tokenId(asToken(4L)) + .build(), + tokenRelation2) + .value( + EntityIDPair.newBuilder() + 
.accountId(id) + .tokenId(asToken(5L)) + .build(), + tokenRelation3) .build(); - given(readableStates3.get(TOKEN_RELS)).willReturn(readableTokenRel); + given(readableStates3.get(TOKEN_RELS)).willReturn(readableTokenRel); final var readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates3); final var query = createGetAccountBalanceQuery(accountNum); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java index c6316d30c846..ef84825cdcf3 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java @@ -24,6 +24,7 @@ import static com.hedera.node.app.service.token.impl.TokenServiceImpl.STAKING_INFO_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKENS_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKEN_RELS_KEY; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -41,6 +42,7 @@ import com.hedera.hapi.node.base.Timestamp; import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.base.TokenRelationship; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.StakingNodeInfo; import com.hedera.hapi.node.state.token.Token; @@ -50,8 +52,6 @@ import com.hedera.hapi.node.token.CryptoGetInfoResponse; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableStakingInfoStore; import com.hedera.node.app.service.token.ReadableTokenRelationStore; @@ -69,6 +69,7 @@ import com.hedera.node.config.converter.BytesConverter; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; +import com.swirlds.common.utility.CommonUtils; import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.BeforeEach; @@ -238,19 +239,19 @@ void getsResponseIfOkResponse() { given(token1.decimals()).willReturn(100); given(token1.symbol()).willReturn("FOO"); - given(token1.tokenNumber()).willReturn(3L); + given(token1.tokenId()).willReturn(asToken(3L)); setupTokenStore(token1); final var tokenRelation = TokenRelation.newBuilder() - .tokenNumber(3L) - .accountNumber(accountNum) + .tokenId(asToken(3L)) + .accountId(id) .balance(1000L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(4L) - .previousToken(2L) + .nextToken(asToken(4L)) + .previousToken(asToken(2L)) .build(); setupTokenRelationStore(tokenRelation); setupStakingInfoStore(); @@ -277,42 +278,42 @@ void checkMulitpleTokenRelations() { given(token2.decimals()).willReturn(50); 
given(token1.symbol()).willReturn("FOO"); given(token2.symbol()).willReturn("BAR"); - given(token1.tokenNumber()).willReturn(3L); - given(token2.tokenNumber()).willReturn(4L); + given(token1.tokenId()).willReturn(asToken(3L)); + given(token2.tokenId()).willReturn(asToken(4L)); setupTokenStore(token1, token2); final var tokenRelation1 = TokenRelation.newBuilder() - .tokenNumber(3L) - .accountNumber(accountNum) + .tokenId(asToken(3L)) + .accountId(id) .balance(1000L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(4L) - .previousToken(2L) + .nextToken(asToken(4L)) + .previousToken(asToken(2L)) .build(); final var tokenRelation2 = TokenRelation.newBuilder() - .tokenNumber(4L) - .accountNumber(accountNum) + .tokenId(asToken(4L)) + .accountId(id) .balance(100L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(5L) - .previousToken(3L) + .nextToken(asToken(5L)) + .previousToken(asToken(3L)) .build(); final var tokenRelation3 = TokenRelation.newBuilder() - .tokenNumber(5L) - .accountNumber(accountNum) + .tokenId(asToken(5L)) + .accountId(id) .balance(10L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(6L) - .previousToken(4L) + .nextToken(asToken(6L)) + .previousToken(asToken(4L)) .build(); setupTokenRelationStore(tokenRelation1, tokenRelation2, tokenRelation3); setupStakingInfoStore(); @@ -339,19 +340,19 @@ void testStakeNumber() { given(token1.decimals()).willReturn(100); given(token1.symbol()).willReturn("FOO"); - given(token1.tokenNumber()).willReturn(3L); + given(token1.tokenId()).willReturn(asToken(3L)); setupTokenStore(token1); final var tokenRelation = TokenRelation.newBuilder() - .tokenNumber(3L) - .accountNumber(accountNum) + .tokenId(asToken(3L)) + .accountId(id) .balance(1000L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(4L) - .previousToken(2L) + .nextToken(asToken(4L)) + .previousToken(asToken(2L)) .build(); setupTokenRelationStore(tokenRelation); setupStakingInfoStore(); @@ -367,31 +368,32 @@ void testStakeNumber() { @Test void testEvmAddressAlias() { + final Bytes evmAddress = Bytes.wrap(CommonUtils.unhex("6aeb3773ea468a814d954e6dec795bfee7d76e26")); final var responseHeader = getOkResponse(); - final var expectedInfo = getExpectedAccountInfoEvm(); + final var expectedInfo = getExpectedAccountInfoEvm(evmAddress); account = account.copyBuilder() .stakedNumber(-1) .declineReward(false) - .alias(Bytes.wrap(evmAddress)) + .alias(evmAddress) .build(); setupAccountStore(); given(token1.decimals()).willReturn(100); given(token1.symbol()).willReturn("FOO"); - given(token1.tokenNumber()).willReturn(3L); + given(token1.tokenId()).willReturn(asToken(3L)); setupTokenStore(token1); final var tokenRelation = TokenRelation.newBuilder() - .tokenNumber(3L) - .accountNumber(accountNum) + .tokenId(asToken(3L)) + .accountId(id) .balance(1000L) .frozen(false) .kycGranted(false) .deleted(false) .automaticAssociation(true) - .nextToken(4L) - .previousToken(2L) + .nextToken(asToken(4L)) + .previousToken(asToken(2L)) .build(); setupTokenRelationStore(tokenRelation); setupStakingInfoStore(); @@ -415,21 +417,26 @@ private void setupAccountStore() { } private void setupTokenStore(Token... 
tokens) { - final var readableToken = MapReadableKVState.builder(TOKENS_KEY); + final var readableToken = MapReadableKVState.builder(TOKENS_KEY); for (Token token : tokens) { - readableToken.value(EntityNum.fromLong(token.tokenNumber()), token); + readableToken.value(token.tokenId(), token); } - given(readableStates2.get(TOKENS_KEY)).willReturn(readableToken.build()); + given(readableStates2.get(TOKENS_KEY)).willReturn(readableToken.build()); final var readableTokenStore = new ReadableTokenStoreImpl(readableStates2); when(context.createStore(ReadableTokenStore.class)).thenReturn(readableTokenStore); } private void setupTokenRelationStore(TokenRelation... tokenRelations) { - final var readableTokenRel = MapReadableKVState.builder(TOKEN_RELS_KEY); + final var readableTokenRel = MapReadableKVState.builder(TOKEN_RELS_KEY); for (TokenRelation tokenRelation : tokenRelations) { - readableTokenRel.value(EntityNumPair.fromLongs(accountNum, tokenRelation.tokenNumber()), tokenRelation); + readableTokenRel.value( + EntityIDPair.newBuilder() + .accountId(id) + .tokenId(tokenRelation.tokenId()) + .build(), + tokenRelation); } - given(readableStates3.get(TOKEN_RELS_KEY)).willReturn(readableTokenRel.build()); + given(readableStates3.get(TOKEN_RELS_KEY)).willReturn(readableTokenRel.build()); final var readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates3); when(context.createStore(ReadableTokenRelationStore.class)).thenReturn(readableTokenRelStore); } @@ -493,7 +500,7 @@ private AccountInfo getExpectedAccountInfo2() { .build(); } - private AccountInfo getExpectedAccountInfoEvm() { + private AccountInfo getExpectedAccountInfoEvm(Bytes evmAddress) { return AccountInfo.newBuilder() .key(key) .accountID(id) @@ -507,8 +514,8 @@ private AccountInfo getExpectedAccountInfoEvm() { .ownedNfts(2) .maxAutomaticTokenAssociations(10) .ethereumNonce(0) - .alias(Bytes.wrap(evmAddress)) - .contractAccountID("6aea3773ea468a814d954e6dec795bfee7d76e25") + .alias(evmAddress) + .contractAccountID("6aeb3773ea468a814d954e6dec795bfee7d76e26") .tokenRelationships(getExpectedTokenRelationship()) .stakingInfo(getExpectedStakingInfo()) .build(); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerParityTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerParityTest.java index 834df94f031e..38baaa9daa34 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerParityTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerParityTest.java @@ -70,12 +70,14 @@ import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.handlers.CryptoTransferHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; +import com.hedera.node.app.service.token.impl.validators.CryptoTransferValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.PreCheckException; import org.junit.jupiter.api.Test; class CryptoTransferHandlerParityTest extends ParityTestBase { - private final CryptoTransferHandler subject = new CryptoTransferHandler(); + private final CryptoTransferValidator validator = new CryptoTransferValidator(); + private final CryptoTransferHandler subject = 
new CryptoTransferHandler(validator); @Test void cryptoTransferTokenReceiverIsMissingAliasScenario() throws PreCheckException { @@ -355,7 +357,11 @@ void tokenTransactWithOwnershipChangeNoReceiverSigReqAndFallbackNotTriggeredDueT context.registerStore(ReadableTokenStore.class, readableTokenStore); subject.preHandle(context); assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); - assertThat(context.requiredNonPayerKeys(), contains(FIRST_TOKEN_SENDER_KT.asPbjKey())); + // We don't want the NO_RECEIVER_SIG_KT to be included in the required keys because the account's receiver sig + // required is false + assertThat( + context.requiredNonPayerKeys(), + contains(FIRST_TOKEN_SENDER_KT.asPbjKey(), SECOND_TOKEN_SENDER_KT.asPbjKey())); } @Test @@ -366,7 +372,10 @@ void tokenTransactWithOwnershipChangeNoReceiverSigReqAndFallbackNotTriggeredDueT context.registerStore(ReadableTokenStore.class, readableTokenStore); subject.preHandle(context); assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); - assertThat(context.requiredNonPayerKeys(), contains(FIRST_TOKEN_SENDER_KT.asPbjKey())); + // Again, we don't want NO_RECEIVER_SIG_KT in the required keys because receiver sig required is false + assertThat( + context.requiredNonPayerKeys(), + containsInAnyOrder(FIRST_TOKEN_SENDER_KT.asPbjKey(), SECOND_TOKEN_SENDER_KT.asPbjKey())); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerPureChecksTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerPureChecksTest.java new file mode 100644 index 000000000000..037783f20907 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerPureChecksTest.java @@ -0,0 +1,352 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.EMPTY_TOKEN_TRANSFER_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; + +import com.hedera.hapi.node.base.AccountAmount; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.token.TokenAssociateTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.util.Collections; +import java.util.List; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +class CryptoTransferHandlerPureChecksTest extends CryptoTransferHandlerTestBase { + + @SuppressWarnings("DataFlowIssue") + @Test + void pureChecksNullArgThrows() { + Assertions.assertThatThrownBy(() -> subject.pureChecks(null)).isInstanceOf(NullPointerException.class); + } + + @Test + void pureChecksHasNoCryptoTransfer() { + final var nonTransferTxnBody = TokenAssociateTransactionBody.newBuilder(); + final var txn = TransactionBody.newBuilder() + .transactionID(TransactionID.newBuilder().accountID(ACCOUNT_3333)) + .tokenAssociate(nonTransferTxnBody) + .build(); + + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TRANSACTION_BODY)); + } + + @Test + void pureChecksHbarTransfersHasNullAccountId() { + final var txn = newCryptoTransfer( + ACCT_3333_MINUS_10, + ACCT_4444_PLUS_10.copyBuilder().accountID((AccountID) null).build()); + + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_ACCOUNT_ID)); + } + + @Test + void pureChecksHbarTransfersHasAccountIdWithEmptyAliasAndNumber() { + final var txn = newCryptoTransfer( + ACCT_3333_MINUS_10, + AccountAmount.newBuilder() + .accountID(AccountID.newBuilder() + .accountNum(5555) + .alias(Bytes.wrap("")) + .build()) + .amount(10) + .build()); + + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_ACCOUNT_ID)); + } + + @Test + void pureChecksHbarTransfersHasAccountIdWithIllegalNumber() { + final var txn = newCryptoTransfer( + ACCT_3333_MINUS_10, + ACCT_4444_PLUS_10.copyBuilder().accountID(asAccount(0)).build()); + + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_ACCOUNT_ID)); + } + + @Test + void pureChecksHbarTransfersHasAccountIdWithIllegalAlias() { + final var txn = newCryptoTransfer( + ACCT_4444_MINUS_5, + AccountAmount.newBuilder() + .accountID(AccountID.newBuilder().alias(Bytes.wrap("")).build()) + .amount(5) + .build()); + + 
Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_ACCOUNT_ID)); + } + + @Test + void pureChecksHbarTransfersHasNonZeroHbarAdjustments() { + // A net non-zero transfer balance of (-10 + 11) = 1 should cause the pure checks to fail + final var txn = newCryptoTransfer( + ACCT_3333_MINUS_10, ACCT_4444_PLUS_10.copyBuilder().amount(11).build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_ACCOUNT_AMOUNTS)); + } + + @Test + void pureChecksHbarTransfersHasRepeatedAccountId() { + final var txn = newCryptoTransfer( + ACCT_3333_MINUS_10, ACCT_3333_MINUS_10.copyBuilder().amount(10).build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS)); + } + + @Test + void pureChecksHasValidHbarTransfers() { + // Note: this test only checks for valid hbar transfers (WITHOUT any token transfers) + final var txn = newCryptoTransfer(ACCT_4444_MINUS_5, ACCT_3333_PLUS_5); + Assertions.assertThatCode(() -> subject.pureChecks(txn)).doesNotThrowAnyException(); + } + + @Test + void pureChecksFungibleTokenTransfersHasMissingTokenId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token((TokenID) null) + // These are TOKEN fungible amount transfers, not HBAR amount transfers + .transfers(ACCT_4444_MINUS_5, ACCT_3333_PLUS_5) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TOKEN_ID)); + } + + @Test + void pureChecksFungibleTokenTransfersHasMissingAccountId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + // These are TOKEN fungible amount transfers, not HBAR amount transfers + .transfers( + ACCT_3333_MINUS_10, + ACCT_4444_PLUS_10 + .copyBuilder() + .accountID((AccountID) null) + .build()) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TRANSFER_ACCOUNT_ID)); + } + + @Test + void pureChecksFungibleTokenTransfersHasRepeatedAccountId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + // These are TOKEN amount transfers, not HBAR amount transfers + .transfers( + ACCT_4444_MINUS_5, + ACCT_4444_MINUS_5.copyBuilder().amount(5).build()) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS)); + } + + @Test + void pureChecksFungibleTokenTransfersHasNonZeroTokenSum() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + // These are TOKEN amount transfers, not HBAR amount transfers + .transfers( + ACCT_3333_MINUS_10, + ACCT_4444_PLUS_10.copyBuilder().amount(5).build()) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN)); + } + + @Test + void pureChecksHasValidFungibleTokenTransfers() { + // Note: this test only checks for valid fungible token transfers (WITHOUT any hbar or nft transfers) + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + 
.transfers(ACCT_4444_MINUS_5, ACCT_3333_PLUS_5) + .build()); + Assertions.assertThatCode(() -> subject.pureChecks(txn)).doesNotThrowAnyException(); + } + + @Test + void pureChecksNonFungibleTokenTransfersHasMissingTokenId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token((TokenID) null) + // These are nft transfers, not hbar or fungible token transfers + .nftTransfers(SERIAL_1_FROM_3333_TO_4444) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TOKEN_ID)); + } + + @Test + void pureChecksNonFungibleTokenTransfersHasInvalidNftId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + // These are nft transfers, not hbar or fungible token transfers + .nftTransfers( + SERIAL_1_FROM_3333_TO_4444.copyBuilder().serialNumber(0).build()) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TOKEN_NFT_SERIAL_NUMBER)); + } + + @Test + void pureChecksNonFungibleTokenTransfersHasMissingSenderId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + // These are nft transfers, not hbar or fungible token transfers + .nftTransfers(SERIAL_2_FROM_4444_TO_3333 + .copyBuilder() + .senderAccountID((AccountID) null) + .build()) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TRANSFER_ACCOUNT_ID)); + } + + @Test + void pureChecksNonFungibleTokenTransfersHasMissingReceiverId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444 + .copyBuilder() + .receiverAccountID((AccountID) null) + .build()) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TRANSFER_ACCOUNT_ID)); + } + + @Test + void pureChecksNonFungibleTokenTransfersHasRepeatedNftId() { + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444, SERIAL_2_FROM_4444_TO_3333, SERIAL_1_FROM_3333_TO_4444) + .build()); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.TOKEN_ID_REPEATED_IN_TOKEN_LIST)); + } + + @Test + void pureChecksHasValidNonFungibleTokenTransfers() { + // Note: this test only checks for valid non-fungible token transfers (WITHOUT any hbar or fungible token + // transfers) + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .nftTransfers( + SERIAL_1_FROM_3333_TO_4444, + SERIAL_2_FROM_4444_TO_3333, + SERIAL_1_FROM_3333_TO_4444.copyBuilder().serialNumber(3).build()) + .build()); + Assertions.assertThatCode(() -> subject.pureChecks(txn)).doesNotThrowAnyException(); + } + + @Test + void pureChecksTokenTransfersDoesNotHaveFungibleOrNonFungibleAmount() { + // This test checks that, if any token transfer is present, it must have at least one fungible or non-fungible + // balance not equal to zero + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .transfers(ACCT_3333_PLUS_5.copyBuilder().amount(0).build()) + // nftTransfers is intentionally empty (will result in a count of zero nft transfers) + 
.nftTransfers() + .build()); + + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(EMPTY_TOKEN_TRANSFER_ACCOUNT_AMOUNTS)); + } + + @Test + void pureChecksTokenTransferHasBothFungibleAndNonFungibleAmounts() { + // This test checks that, if a transfer for a token is present, it must have ONLY a fungible transfer OR an NFT + // transfer, but not both + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .transfers(ACCT_3333_MINUS_10, ACCT_4444_PLUS_10) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444) + .build()); + + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_ACCOUNT_AMOUNTS)); + } + + @Test + void pureChecksForEmptyHbarTransferAndEmptyTokenTransfers() { + // It's actually valid to have no hbar transfers and no token transfers + final var txn = newCryptoTransfer(Collections.emptyList(), Collections.emptyList()); + Assertions.assertThatCode(() -> subject.pureChecks(txn)).doesNotThrowAnyException(); + } + + @Test + void pureChecksHasValidHbarAndTokenTransfers() { + // Tests that valid hbar transfers, fungible transfers, and non-fungible transfers are all valid when given + // together + final var token9753 = asToken(9753); + final var txn = newCryptoTransfer( + // Valid hbar transfers + List.of(ACCT_3333_MINUS_10, ACCT_4444_PLUS_10), + List.of( + // Valid fungible token transfers + TokenTransferList.newBuilder() + .token(TOKEN_2468) + .transfers(ACCT_4444_MINUS_5, ACCT_3333_PLUS_5) + .build(), + TokenTransferList.newBuilder() + .token(token9753) + .transfers(ACCT_4444_MINUS_5, ACCT_3333_PLUS_5) + .build(), + // Valid nft token transfers + TokenTransferList.newBuilder() + .token(token9753) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444, SERIAL_2_FROM_4444_TO_3333) + .build())); + + Assertions.assertThatCode(() -> subject.pureChecks(txn)).doesNotThrowAnyException(); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTest.java new file mode 100644 index 000000000000..7a66531406be --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTest.java @@ -0,0 +1,362 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.BATCH_SIZE_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TRANSFER_LIST_SIZE_LIMIT_EXCEEDED; +import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_ALLOWANCES_IS_ENABLED; +import static com.hedera.node.app.service.mono.context.properties.PropertyNames.LEDGER_NFT_TRANSFERS_MAX_LEN; +import static com.hedera.node.app.service.mono.context.properties.PropertyNames.LEDGER_TOKEN_TRANSFERS_MAX_LEN; +import static com.hedera.node.app.service.mono.context.properties.PropertyNames.LEDGER_TRANSFERS_MAX_LEN; +import static com.hedera.node.app.service.mono.context.properties.PropertyNames.TOKENS_NFTS_ARE_ENABLED; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.aaWith; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.nftTransferWith; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.mock; + +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.handlers.CryptoTransferHandler; +import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.swirlds.config.api.Configuration; +import com.swirlds.test.framework.config.TestConfigBuilder; +import java.util.List; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class CryptoTransferHandlerTest extends CryptoTransferHandlerTestBase { + private static final TokenID TOKEN_1357 = asToken(1357); + private static final TokenID TOKEN_9191 = asToken(9191); + + private Configuration config; + + @BeforeEach + public void setUp() { + super.setUp(); + subject = new CryptoTransferHandler(validator); + } + + @SuppressWarnings("DataFlowIssue") + @Test + void handleNullArgs() { + Assertions.assertThatThrownBy(() -> subject.handle(null)).isInstanceOf(NullPointerException.class); + } + + @Test + void handleExceedsMaxHbarTransfers() { + config = defaultConfig().withValue(LEDGER_TRANSFERS_MAX_LEN, 1).getOrCreateConfig(); + final var txn = newCryptoTransfer(ACCT_3333_MINUS_10, ACCT_4444_PLUS_10); + final var context = 
mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TRANSFER_LIST_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void handleHbarAllowancePresentButAllowancesDisabled() { + config = defaultConfig().withValue(HEDERA_ALLOWANCES_IS_ENABLED, false).getOrCreateConfig(); + final var txn = newCryptoTransfer( + ACCT_3333_MINUS_10.copyBuilder().isApproval(true).build(), ACCT_4444_PLUS_10); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void handleExceedsMaxFungibleTokenTransfersInSingleTokenTransferList() { + config = defaultConfig().withValue(LEDGER_TOKEN_TRANSFERS_MAX_LEN, 1).getOrCreateConfig(); + // Here we configure a SINGLE TokenTransferList that has 2 fungible token transfers + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .transfers(ACCT_4444_MINUS_5, ACCT_3333_PLUS_5) + .build()); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void handleExceedsMaxFungibleTokenTransfersAcrossMultipleTokenTransferLists() { + config = defaultConfig().withValue(LEDGER_TOKEN_TRANSFERS_MAX_LEN, 4).getOrCreateConfig(); + // Here we configure MULTIPLE TokenTransferList objects, each with a fungible token transfer credit and debit + final var txn = newCryptoTransfer( + TokenTransferList.newBuilder() + .token(TOKEN_1357) + .transfers(ACCT_3333_MINUS_10, ACCT_4444_PLUS_10) + .build(), + TokenTransferList.newBuilder() + .token(TOKEN_2468) + .transfers(ACCT_4444_MINUS_5, ACCT_3333_PLUS_5) + .build(), + TokenTransferList.newBuilder() + .token(TOKEN_9191) + .transfers(ACCT_3333_MINUS_10, ACCT_4444_PLUS_10) + .build()); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void handleHasNftTransfersButNftsNotEnabled() { + config = defaultConfig().withValue(TOKENS_NFTS_ARE_ENABLED, false).getOrCreateConfig(); + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444) + .build()); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void handleExceedsMaxNftTransfersInSingleTokenTransferList() { + config = defaultConfig().withValue(LEDGER_NFT_TRANSFERS_MAX_LEN, 1).getOrCreateConfig(); + // Here we configure a SINGLE TokenTransferList that has 2 nft transfers + + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444, SERIAL_2_FROM_4444_TO_3333) + .build()); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(BATCH_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void handleExceedsMaxNftTransfersAcrossMultipleTokenTransferLists() { + config = defaultConfig().withValue(LEDGER_NFT_TRANSFERS_MAX_LEN, 1).getOrCreateConfig(); + // Here we configure TWO TokenTransferList objects that each have a single nft transfer + final var txn = 
newCryptoTransfer( + TokenTransferList.newBuilder() + .token(TOKEN_2468) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444) + .build(), + TokenTransferList.newBuilder() + .token(TOKEN_1357) + .nftTransfers(SERIAL_2_FROM_4444_TO_3333) + .build()); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(BATCH_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void handleFungibleTokenAllowancePresentButAllowancesDisabled() { + config = defaultConfig().withValue(HEDERA_ALLOWANCES_IS_ENABLED, false).getOrCreateConfig(); + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .transfers(ACCT_4444_PLUS_10.copyBuilder().isApproval(true).build()) + .build()); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void handleNftAllowancePresentButAllowancesDisabled() { + config = defaultConfig().withValue(HEDERA_ALLOWANCES_IS_ENABLED, false).getOrCreateConfig(); + final var txn = newCryptoTransfer(TokenTransferList.newBuilder() + .token(TOKEN_2468) + .nftTransfers(SERIAL_1_FROM_3333_TO_4444 + .copyBuilder() + .isApproval(true) + .build()) + .build()); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void autoCreatesAndReplacesAliasesInOp() { + givenTxn(); + refreshWritableStores(); + givenStoresAndConfig(handleContext); + + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(ecKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + subject.handle(handleContext); + + assertThat(writableAccountStore.modifiedAliasesInState()).hasSize(2); + assertThat(writableAccountStore.modifiedAccountsInState()).hasSize(2); + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(4); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNotNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNotNull(); + assertThat(writableAliases.get(ecKeyAlias).accountNum()).isEqualTo(createdNumber); + assertThat(writableAliases.get(edKeyAlias).accountNum()).isEqualTo(createdNumber + 1); + } + + @Test + void failsOnRepeatedAliasAndCorrespondingNumber() { + final var txnBody = CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder() + .accountAmounts( + aaWith(ownerId, -2_000), + aaWith(unknownAliasedId, +1_000), + aaWith(asAccount(createdNumber), +1_000)) + .build()) + .tokenTransfers( + TokenTransferList.newBuilder() + .token(fungibleTokenId) + .transfers(List.of(aaWith(ownerId, -1_000), aaWith(unknownAliasedId1, +1_000))) + .build(), + TokenTransferList.newBuilder() + .token(nonFungibleTokenId) + 
.nftTransfers(nftTransferWith(ownerId, unknownAliasedId1, 1)) + .build()) + .build(); + givenTxn(txnBody); + refreshWritableStores(); + givenStoresAndConfig(handleContext); + + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(ecKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS)); + } + + @Test + void failsOnRepeatedAliasAndCorrespondingNumberInTokenTransferList() { + final var txnBody = CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder() + .accountAmounts(aaWith(ownerId, -1_000), aaWith(unknownAliasedId, +1_000)) + .build()) + .tokenTransfers( + TokenTransferList.newBuilder() + .token(fungibleTokenId) + .transfers(List.of( + aaWith(ownerId, -2_000), + aaWith(unknownAliasedId1, +1_000), + aaWith(asAccount(createdNumber + 1), +1_000))) + .build(), + TokenTransferList.newBuilder() + .token(nonFungibleTokenId) + .nftTransfers(nftTransferWith(ownerId, unknownAliasedId1, 1)) + .build()) + .build(); + givenTxn(txnBody); + refreshWritableStores(); + givenStoresAndConfig(handleContext); + + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(ecKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS)); + } + + private HandleContext mockContext(final TransactionBody txn) { + final var context = mock(HandleContext.class); + given(context.configuration()).willReturn(config); + given(context.body()).willReturn(txn); + return context; + } + + private static TestConfigBuilder defaultConfig() { + return HederaTestConfigBuilder.create() + .withValue(LEDGER_TRANSFERS_MAX_LEN, 10) + .withValue(LEDGER_TOKEN_TRANSFERS_MAX_LEN, 10) + .withValue(TOKENS_NFTS_ARE_ENABLED, true) + .withValue(LEDGER_NFT_TRANSFERS_MAX_LEN, 10) + .withValue(HEDERA_ALLOWANCES_IS_ENABLED, true); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTestBase.java 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTestBase.java new file mode 100644 index 000000000000..fea7b70a7b6a --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTestBase.java @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; + +import com.hedera.hapi.node.base.AccountAmount; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.NftTransfer; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.token.impl.handlers.CryptoTransferHandler; +import com.hedera.node.app.service.token.impl.test.handlers.transfer.StepsBase; +import com.hedera.node.app.service.token.impl.validators.CryptoTransferValidator; +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; + +class CryptoTransferHandlerTestBase extends StepsBase { + protected static final AccountID ACCOUNT_3333 = asAccount(3333); + protected static final AccountID ACCOUNT_4444 = asAccount(4444); + protected static final TokenID TOKEN_2468 = asToken(2468); + + protected static final AccountAmount ACCT_3333_MINUS_10 = + AccountAmount.newBuilder().accountID(ACCOUNT_3333).amount(-10).build(); + protected static final AccountAmount ACCT_4444_MINUS_5 = + AccountAmount.newBuilder().accountID(ACCOUNT_4444).amount(-5).build(); + protected static final AccountAmount ACCT_3333_PLUS_5 = + AccountAmount.newBuilder().accountID(ACCOUNT_3333).amount(5).build(); + protected static final AccountAmount ACCT_4444_PLUS_10 = + AccountAmount.newBuilder().accountID(ACCOUNT_4444).amount(10).build(); + protected static final NftTransfer SERIAL_1_FROM_3333_TO_4444 = NftTransfer.newBuilder() + .serialNumber(1) + .senderAccountID(ACCOUNT_3333) + .receiverAccountID(ACCOUNT_4444) + .build(); + protected static final NftTransfer SERIAL_2_FROM_4444_TO_3333 = NftTransfer.newBuilder() + .serialNumber(2) + .senderAccountID(ACCOUNT_4444) + .receiverAccountID(ACCOUNT_3333) + .build(); + + protected CryptoTransferHandler subject; + protected CryptoTransferValidator validator; + + @BeforeEach + public void setUp() { + super.setUp(); + validator = new CryptoTransferValidator(); + subject = new CryptoTransferHandler(validator); + } + + protected TransactionBody newCryptoTransfer(final AccountAmount... 
acctAmounts) { + return newCryptoTransfer(Arrays.stream(acctAmounts).toList(), List.of()); + } + + protected TransactionBody newCryptoTransfer(final TokenTransferList... tokenTransferLists) { + return newCryptoTransfer(List.of(), Arrays.stream(tokenTransferLists).toList()); + } + + // Note: `tokenTransferLists` can include both fungible and non-fungible token transfers + protected TransactionBody newCryptoTransfer( + final List<AccountAmount> acctAmounts, final List<TokenTransferList> tokenTransferLists) { + return TransactionBody.newBuilder() + .transactionID(TransactionID.newBuilder().accountID(ACCOUNT_3333)) + .cryptoTransfer(CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder().accountAmounts(acctAmounts)) + .tokenTransfers(tokenTransferLists)) + .build(); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java index 4f9d4e3b14a9..3fbcd3438609 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java @@ -41,6 +41,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Duration; @@ -63,6 +64,7 @@ import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoHandlerTestBase; import com.hedera.node.app.service.token.impl.validators.StakingValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; +import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.info.NodeInfo; import com.hedera.node.app.spi.validation.AttributeValidator; import com.hedera.node.app.spi.validation.ExpiryValidator; @@ -90,7 +92,7 @@ class CryptoUpdateHandlerTest extends CryptoHandlerTestBase { private HandleContext handleContext; @Mock - private NodeInfo nodeInfo; + private NetworkInfo networkInfo; @Mock(strictness = Strictness.LENIENT) private LongSupplier consensusSecondNow; @@ -135,7 +137,7 @@ public void setUp() { expiryValidator = new StandardizedExpiryValidator( System.out::println, attributeValidator, consensusSecondNow, hederaNumbers, configProvider); stakingValidator = new StakingValidator(); - subject = new CryptoUpdateHandler(waivers, stakingValidator, nodeInfo); + subject = new CryptoUpdateHandler(waivers, stakingValidator, networkInfo); } @Test @@ -224,7 +226,7 @@ void updatesStakedAccountNumberIfPresentAndEnabled() { @Test void updatesStakedNodeNumberIfPresentAndEnabled() { - given(nodeInfo.isValidId(anyLong())).willReturn(true); + given(networkInfo.nodeInfo(anyLong())).willReturn(mock(NodeInfo.class)); final var txn = new CryptoUpdateBuilder().withStakedNodeId(0).build(); givenTxnWith(txn); @@ -272,7 +274,7 @@ void doesntThrowStakedNodeIdProvidedIfValid() { final var txn = new CryptoUpdateBuilder().withStakedNodeId(3).build(); givenTxnWith(txn); - given(nodeInfo.isValidId(3)).willReturn(true); + given(networkInfo.nodeInfo(3)).willReturn(mock(NodeInfo.class)); assertEquals(0, writableStore.get(updateAccountId).stakedNumber()); subject.handle(handleContext); diff --git
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java index fc848fb7afcb..8dd077861d20 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java @@ -603,7 +603,7 @@ void nftNotOwnedByAccount() { writableTokenRelStore = newWritableStoreWithTokenRels(newAccount4680Token531Rel(0)); writableNftStore = newWritableStoreWithNfts(Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(1) .build()) .ownerId(TREASURY_ACCOUNT_9876) @@ -694,28 +694,28 @@ void nftSerialsWipedWithLeftoverNftSerials() { writableNftStore = newWritableStoreWithNfts( Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(1) .build()) // do not set ownerId - default to null, meaning treasury owns this NFT .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(2) .build()) .ownerId(ACCOUNT_4680) .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(3) .build()) .ownerId(ACCOUNT_4680) .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(4) .build()) .ownerId(ACCOUNT_4680) @@ -739,14 +739,14 @@ void nftSerialsWipedWithLeftoverNftSerials() { final var tokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); Assertions.assertThat(tokenRel.balance()).isEqualTo(1); // Verify the treasury's NFT wasn't removed - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 1))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 1))) .isNotNull(); // Verify that two of the account's NFTs were removed, and that the final one remains - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 2))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 2))) .isNull(); - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 3))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 3))) .isNull(); - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 4))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 4))) .isNotNull(); } @@ -773,28 +773,28 @@ void nftSerialsWipedWithNoLeftoverNftSerials() { writableNftStore = newWritableStoreWithNfts( Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(1) .build()) // do not set ownerId - default to null, meaning treasury owns this NFT .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(2) .build()) .ownerId(ACCOUNT_4680) .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(3) .build()) .ownerId(ACCOUNT_4680) .build(), Nft.newBuilder() 
.id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(4) .build()) .ownerId(ACCOUNT_4680) @@ -818,14 +818,14 @@ void nftSerialsWipedWithNoLeftoverNftSerials() { final var tokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); Assertions.assertThat(tokenRel.balance()).isZero(); // Verify the treasury's NFT wasn't removed - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 1))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 1))) .isNotNull(); // Verify that the account's NFTs were removed - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 2))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 2))) .isNull(); - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 3))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 3))) .isNull(); - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 4))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 4))) .isNull(); } @@ -853,28 +853,28 @@ void duplicateNftSerials() { writableNftStore = newWritableStoreWithNfts( Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(1) .build()) // do not set ownerId - default to null, meaning treasury owns this NFT .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(2) .build()) .ownerId(ACCOUNT_4680) .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(3) .build()) .ownerId(ACCOUNT_4680) .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .serialNumber(4) .build()) .ownerId(ACCOUNT_4680) @@ -898,14 +898,14 @@ void duplicateNftSerials() { final var tokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); Assertions.assertThat(tokenRel.balance()).isZero(); // Verify the treasury's NFT wasn't removed - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 1))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 1))) .isNotNull(); // Verify that the account's NFTs were removed - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 2))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 2))) .isNull(); - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 3))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 3))) .isNull(); - Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 4))) + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531, 4))) .isNull(); } @@ -919,9 +919,9 @@ private Token newNftToken531(final long totalSupply) { private Token newToken531(final TokenType type, final long totalSupply) { return Token.newBuilder() - .tokenNumber(TOKEN_531.tokenNum()) + .tokenId(TOKEN_531) .tokenType(type) - .treasuryAccountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .treasuryAccountId(TREASURY_ACCOUNT_9876) .wipeKey(TOKEN_WIPE_KT.asPbjKey()) .totalSupply(totalSupply) .build(); @@ -936,9 +936,7 @@ private TokenRelation newAccount4680Token531Rel(final long balance) { } private TokenRelation newToken531Rel(final AccountID 
accountId, final long balance) { - final var builder = TokenRelation.newBuilder() - .accountNumber(accountId.accountNumOrThrow()) - .tokenNumber(TOKEN_531.tokenNum()); + final var builder = TokenRelation.newBuilder().accountId(accountId).tokenId(TOKEN_531); if (balance > 0) builder.balance(balance); return builder.build(); } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java index 4c21fe06bdca..dab5255fc29e 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java @@ -22,7 +22,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ID_REPEATED_IN_TOKEN_LIST; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_WAS_DELETED; -import static com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbj; import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; @@ -52,11 +51,11 @@ import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.token.TokenAssociateTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; @@ -293,27 +292,30 @@ void tokensAssociateToAccountWithNoTokenRels() { subject.handle(context); Assertions.assertThat(writableTokenRelStore.modifiedTokens()) .contains( - EntityNumPair.fromAccountTokenRel(fromPbj(newAcctId), KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY), - EntityNumPair.fromAccountTokenRel(fromPbj(newAcctId), KNOWN_TOKEN_WITH_WIPE)); + EntityIDPair.newBuilder() + .accountId(newAcctId) + .tokenId(toPbj(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY)) + .build(), + EntityIDPair.newBuilder() + .accountId(newAcctId) + .tokenId(toPbj(KNOWN_TOKEN_WITH_WIPE)) + .build()); final var headToken = TokenID.newBuilder() .tokenNum(writableAccountStore.getAccountById(newAcctId).headTokenNumber()) .build(); final var headTokenRel = writableTokenRelStore.get(newAcctId, headToken); Assertions.assertThat(headTokenRel.frozen()).isFalse(); Assertions.assertThat(headTokenRel.kycGranted()).isFalse(); - Assertions.assertThat(headTokenRel.previousToken()).isNotPositive(); - Assertions.assertThat(headTokenRel.tokenNumber()) - .isEqualTo(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()); - Assertions.assertThat(headTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); - final var nextToHeadTokenRel = writableTokenRelStore.get( - newAcctId, - 
TokenID.newBuilder().tokenNum(headTokenRel.nextToken()).build()); + Assertions.assertThat(headTokenRel.previousToken()).isNull(); + Assertions.assertThat(headTokenRel.tokenId()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY)); + Assertions.assertThat(headTokenRel.nextToken()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_WIPE)); + final var nextToHeadTokenRel = writableTokenRelStore.get(newAcctId, headTokenRel.nextToken()); Assertions.assertThat(nextToHeadTokenRel.frozen()).isFalse(); Assertions.assertThat(nextToHeadTokenRel.kycGranted()).isFalse(); Assertions.assertThat(nextToHeadTokenRel.previousToken()) - .isEqualTo(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()); - Assertions.assertThat(nextToHeadTokenRel.tokenNumber()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); - Assertions.assertThat(nextToHeadTokenRel.nextToken()).isNotPositive(); + .isEqualTo(toPbj(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY)); + Assertions.assertThat(nextToHeadTokenRel.tokenId()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_WIPE)); + Assertions.assertThat(nextToHeadTokenRel.nextToken()).isNull(); } @Test @@ -329,15 +331,15 @@ void tokensAssociateToAccountWithExistingTokenRels() { // put the pre-existing token rels into the rel store writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(newAcctNum) - .tokenNumber(KNOWN_TOKEN_WITH_WIPE.getTokenNum()) - .nextToken(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()) + .accountId(newAcctId) + .tokenId(toPbj(KNOWN_TOKEN_WITH_WIPE)) + .nextToken(toPbj(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY)) .balance(100) .build()); writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(newAcctNum) - .tokenNumber(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()) - .previousToken(KNOWN_TOKEN_WITH_WIPE.getTokenNum()) + .accountId(newAcctId) + .tokenId(toPbj(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY)) + .previousToken(toPbj(KNOWN_TOKEN_WITH_WIPE)) .balance(200) .build()); @@ -357,50 +359,48 @@ void tokensAssociateToAccountWithExistingTokenRels() { Assertions.assertThat(writableTokenRelStore.modifiedTokens()) .contains( - EntityNumPair.fromAccountTokenRel(fromPbj(newAcctId), KNOWN_TOKEN_WITH_FREEZE), - EntityNumPair.fromAccountTokenRel(fromPbj(newAcctId), KNOWN_TOKEN_WITH_KYC)); + EntityIDPair.newBuilder() + .accountId(newAcctId) + .tokenId(toPbj(KNOWN_TOKEN_WITH_FREEZE)) + .build(), + EntityIDPair.newBuilder() + .accountId(newAcctId) + .tokenId(toPbj(KNOWN_TOKEN_WITH_KYC)) + .build()); final var headTokenId = TokenID.newBuilder() .tokenNum(writableAccountStore.getAccountById(newAcctId).headTokenNumber()) .build(); final var headTokenRel = writableTokenRelStore.get(newAcctId, headTokenId); - Assertions.assertThat(headTokenRel.previousToken()).isNotPositive(); - Assertions.assertThat(headTokenRel.tokenNumber()).isEqualTo(KNOWN_TOKEN_WITH_FREEZE.getTokenNum()); - Assertions.assertThat(headTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_KYC.getTokenNum()); + Assertions.assertThat(headTokenRel.previousToken()).isNull(); + Assertions.assertThat(headTokenRel.tokenId()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_FREEZE)); + Assertions.assertThat(headTokenRel.nextToken()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_KYC)); Assertions.assertThat(headTokenRel.frozen()).isTrue(); Assertions.assertThat(headTokenRel.kycGranted()).isFalse(); Assertions.assertThat(headTokenRel.automaticAssociation()).isFalse(); - final var nextToHeadTokenRel = writableTokenRelStore.get( - newAcctId, - TokenID.newBuilder().tokenNum(headTokenRel.nextToken()).build()); - 
Assertions.assertThat(nextToHeadTokenRel.previousToken()).isEqualTo(KNOWN_TOKEN_WITH_FREEZE.getTokenNum()); - Assertions.assertThat(nextToHeadTokenRel.tokenNumber()).isEqualTo(KNOWN_TOKEN_WITH_KYC.getTokenNum()); - Assertions.assertThat(nextToHeadTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); + final var nextToHeadTokenRel = writableTokenRelStore.get(newAcctId, headTokenRel.nextToken()); + Assertions.assertThat(nextToHeadTokenRel.previousToken().tokenNum()) + .isEqualTo(KNOWN_TOKEN_WITH_FREEZE.getTokenNum()); + Assertions.assertThat(nextToHeadTokenRel.tokenId()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_KYC)); + Assertions.assertThat(nextToHeadTokenRel.nextToken()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_WIPE)); Assertions.assertThat(nextToHeadTokenRel.frozen()).isFalse(); // Note: this token doesn't actually have a KYC key even though its name implies that // it does Assertions.assertThat(nextToHeadTokenRel.kycGranted()).isFalse(); Assertions.assertThat(nextToHeadTokenRel.automaticAssociation()).isFalse(); - final var thirdTokenRel = writableTokenRelStore.get( - newAcctId, - TokenID.newBuilder() - .tokenNum(nextToHeadTokenRel.nextToken()) - .build()); - Assertions.assertThat(thirdTokenRel.previousToken()).isEqualTo(KNOWN_TOKEN_WITH_KYC.getTokenNum()); - Assertions.assertThat(thirdTokenRel.tokenNumber()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); - Assertions.assertThat(thirdTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()); + final var thirdTokenRel = writableTokenRelStore.get(newAcctId, nextToHeadTokenRel.nextToken()); + Assertions.assertThat(thirdTokenRel.previousToken()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_KYC)); + Assertions.assertThat(thirdTokenRel.tokenId()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_WIPE)); + Assertions.assertThat(thirdTokenRel.nextToken()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY)); Assertions.assertThat(thirdTokenRel.frozen()).isFalse(); Assertions.assertThat(thirdTokenRel.kycGranted()).isFalse(); Assertions.assertThat(thirdTokenRel.automaticAssociation()).isFalse(); - final var fourthTokenRel = writableTokenRelStore.get( - newAcctId, - TokenID.newBuilder().tokenNum(thirdTokenRel.nextToken()).build()); - Assertions.assertThat(fourthTokenRel.previousToken()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); - Assertions.assertThat(fourthTokenRel.tokenNumber()) - .isEqualTo(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()); - Assertions.assertThat(fourthTokenRel.nextToken()).isNotPositive(); + final var fourthTokenRel = writableTokenRelStore.get(newAcctId, thirdTokenRel.nextToken()); + Assertions.assertThat(fourthTokenRel.previousToken()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_WIPE)); + Assertions.assertThat(fourthTokenRel.tokenId()).isEqualTo(toPbj(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY)); + Assertions.assertThat(fourthTokenRel.nextToken()).isNull(); Assertions.assertThat(fourthTokenRel.frozen()).isFalse(); Assertions.assertThat(fourthTokenRel.kycGranted()).isFalse(); Assertions.assertThat(fourthTokenRel.automaticAssociation()).isFalse(); @@ -433,8 +433,14 @@ void missingAccountHeadTokenDoesntStopTokenAssociation() { Assertions.assertThat(writableTokenRelStore.modifiedTokens()) .contains( - EntityNumPair.fromAccountTokenRel(fromPbj(newAcctId), KNOWN_TOKEN_WITH_FREEZE), - EntityNumPair.fromAccountTokenRel(fromPbj(newAcctId), KNOWN_TOKEN_WITH_KYC)); + EntityIDPair.newBuilder() + .accountId(newAcctId) + .tokenId(toPbj(KNOWN_TOKEN_WITH_FREEZE)) + .build(), + EntityIDPair.newBuilder() + .accountId(newAcctId) + 
.tokenId(toPbj(KNOWN_TOKEN_WITH_KYC)) + .build()); final var updatedAcct = writableAccountStore.getAccountById(newAcctId); Assertions.assertThat(updatedAcct).isNotNull(); // The account's updated head token num will point to the first new token diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java index 0da60c795d20..6467f4f18b8b 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java @@ -243,9 +243,9 @@ void tokenIdNotFound() { void tokenIsDeleted() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .deleted(true) // Intentionally deleted .build()); @@ -262,9 +262,9 @@ void tokenIsDeleted() { void tokenIsPaused() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .paused(true) // Intentionally paused .build()); @@ -281,15 +281,15 @@ void tokenDoesntHaveSupplyKey() { final var totalFungibleSupply = 5; writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey((Key) null) // Intentionally missing supply key .totalSupply(totalFungibleSupply) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(totalFungibleSupply) .build()); final var txn = newBurnTxn(TOKEN_123, totalFungibleSupply + 1); @@ -304,9 +304,9 @@ void tokenDoesntHaveSupplyKey() { void tokenTreasuryRelDoesntExist() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .build()); // Intentionally has no token rels: @@ -341,15 +341,15 @@ void fungibleTokenTreasuryAccountDoesntExist() { // Intentionally has no treasury account: writableAccountStore = newWritableStoreWithAccounts(); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + 
.accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(10) .build()); final var txn = newBurnTxn(TOKEN_123, 10); @@ -365,15 +365,15 @@ void fungibleAmountExceedsSupply() { final var totalFungibleSupply = 5; writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(totalFungibleSupply) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(totalFungibleSupply) .build()); final var txn = newBurnTxn(TOKEN_123, totalFungibleSupply + 1); @@ -387,15 +387,15 @@ void fungibleAmountExceedsSupply() { @Test void fungibleAmountExceedsBalance() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(8) .build()); // The token treasury has a balance of 8. The token supply is 10, so a fungible amount of 9 exceed the total @@ -412,20 +412,20 @@ void fungibleAmountExceedsBalance() { @Test void fungibleAmountBurnedWithLeftoverTreasuryBalance() { writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .accountNumber(ACCOUNT_1339.accountNum()) .numberTreasuryTitles(1) .numberPositiveBalances(1) .build()); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(9) .build()); final var txn = newBurnTxn(TOKEN_123, 8); @@ -448,20 +448,20 @@ void fungibleAmountBurnedWithLeftoverTreasuryBalance() { void fungibleAmountBurnedWithZeroTreasuryBalance() { writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .accountNumber(ACCOUNT_1339.accountNum()) .numberTreasuryTitles(1) .numberPositiveBalances(1) .build()); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(8) .build()); final var 
txn = newBurnTxn(TOKEN_123, 8); @@ -518,15 +518,15 @@ void nftSerialCountExceedsBatchSize() { void invalidNftSerial() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .build()); writableNftStore = newWritableStoreWithNfts(); final var txn = newBurnTxn(TOKEN_123, 0, -1L); @@ -541,15 +541,15 @@ void invalidNftSerial() { void nftSerialNotFound() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(10) .build()); writableNftStore = new WritableNftStore(new MapWritableStates( @@ -567,15 +567,15 @@ void nftSerialNotFound() { void nftSerialNumsEmpty() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(10) .build()); final var txn = newBurnTxn(TOKEN_123, 0); @@ -590,22 +590,22 @@ void nftSerialNumsEmpty() { void nftNotOwnedByTreasury() { writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(10) .build()); // this owner number isn't the treasury AccountID ownerId = AccountID.newBuilder().accountNum(999).build(); writableNftStore = newWritableStoreWithNfts(Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(1L) .build()) .ownerId(ownerId) @@ -625,20 +625,20 @@ void nftTreasuryAccountDoesntExist() { // Intentionally has no treasury account: writableAccountStore = newWritableStoreWithAccounts(); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) 
.supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(10) .build()); writableNftStore = newWritableStoreWithNfts(Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(1L) .build()) // do not set ownerId - default to null @@ -655,33 +655,33 @@ void nftTreasuryAccountDoesntExist() { void numNftSerialsExceedsNftSupply() { writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .accountNumber(ACCOUNT_1339.accountNum()) .numberTreasuryTitles(1) .numberPositiveBalances(1) .build()); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(1) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(1) .build()); writableNftStore = newWritableStoreWithNfts( Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(1L) .build()) // do not set ownerId - default to null .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(2L) .build()) // do not set ownerId - default to null @@ -698,41 +698,41 @@ void numNftSerialsExceedsNftSupply() { void nftSerialsBurnedWithLeftoverTreasuryBalance() { writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .accountNumber(ACCOUNT_1339.accountNum()) .numberTreasuryTitles(1) .numberPositiveBalances(1) .numberOwnedNfts(3) .build()); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(3) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(3) .build()); writableNftStore = newWritableStoreWithNfts( Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(1L) .build()) // do not set ownerId - default to null .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(2L) .build()) // do not set ownerId - default to null .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(3L) .build()) // do not set ownerId - default to null @@ -758,41 +758,41 @@ void nftSerialsBurnedWithLeftoverTreasuryBalance() { void nftSerialsBurnedWithNoLeftoverTreasuryBalance() { writableAccountStore = 
newWritableStoreWithAccounts(Account.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .accountNumber(ACCOUNT_1339.accountNum()) .numberTreasuryTitles(1) .numberPositiveBalances(1) .numberOwnedNfts(3) .build()); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(3) .build()); writableNftStore = newWritableStoreWithNfts( Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(1L) .build()) // do not set ownerId - default to null .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(2L) .build()) // do not set ownerId - default to null .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(3L) .build()) // do not set ownerId - default to null @@ -820,40 +820,40 @@ void duplicateNftSerials() { // This is a success case, and should be identical to the case without no duplicates above writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .accountNumber(ACCOUNT_1339.accountNum()) .numberTreasuryTitles(1) .numberPositiveBalances(1) .build()); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_1339) .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) .totalSupply(10) .build()); writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_123.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_123) .balance(3) .build()); writableNftStore = newWritableStoreWithNfts( Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(1L) .build()) // do not set ownerId - default to null .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(2L) .build()) // do not set ownerId - default to null .build(), Nft.newBuilder() .id(UniqueTokenId.newBuilder() - .tokenTypeNumber(TOKEN_123.tokenNum()) + .tokenId(TOKEN_123) .serialNumber(3L) .build()) // do not set ownerId - default to null diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java index ead9be63e0dc..97154834280b 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java @@ -146,8 
+146,8 @@ void handleWorksForFungibleCreate() { assertThat(writableTokenStore.get(newTokenId)).isNotNull(); final var token = writableTokenStore.get(newTokenId); - assertThat(token.treasuryAccountNumber()).isEqualTo(treasuryId.accountNum()); - assertThat(token.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(token.treasuryAccountId()).isEqualTo(treasuryId); + assertThat(token.tokenId()).isEqualTo(newTokenId); assertThat(token.totalSupply()).isEqualTo(1000L); assertThat(token.tokenType()).isEqualTo(TokenType.FUNGIBLE_COMMON); assertThat(token.expiry()) @@ -159,7 +159,7 @@ void handleWorksForFungibleCreate() { assertThat(token.supplyKey()).isEqualTo(A_COMPLEX_KEY); assertThat(token.feeScheduleKey()).isEqualTo(A_COMPLEX_KEY); assertThat(token.autoRenewSecs()).isEqualTo(autoRenewSecs); - assertThat(token.autoRenewAccountNumber()).isEqualTo(autoRenewAccountId.accountNum()); + assertThat(token.autoRenewAccountId()).isEqualTo(autoRenewAccountId); assertThat(token.decimals()).isZero(); assertThat(token.name()).isEqualTo("TestToken"); assertThat(token.symbol()).isEqualTo("TT"); @@ -171,13 +171,13 @@ void handleWorksForFungibleCreate() { assertThat(tokenRel.balance()).isEqualTo(1000L); assertThat(tokenRel.deleted()).isFalse(); - assertThat(tokenRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); - assertThat(tokenRel.accountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(tokenRel.tokenId()).isEqualTo(newTokenId); + assertThat(tokenRel.accountId()).isEqualTo(treasuryId); assertThat(tokenRel.kycGranted()).isFalse(); assertThat(tokenRel.automaticAssociation()).isFalse(); assertThat(tokenRel.frozen()).isFalse(); - assertThat(tokenRel.nextToken()).isZero(); - assertThat(tokenRel.previousToken()).isZero(); + assertThat(tokenRel.nextToken()).isNull(); + assertThat(tokenRel.previousToken()).isNull(); } @Test @@ -200,8 +200,8 @@ void handleWorksForFungibleCreateWithSelfDenominatedToken() { assertThat(writableTokenStore.get(newTokenId)).isNotNull(); final var token = writableTokenStore.get(newTokenId); - assertThat(token.treasuryAccountNumber()).isEqualTo(treasuryId.accountNum()); - assertThat(token.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(token.treasuryAccountId()).isEqualTo(treasuryId); + assertThat(token.tokenId()).isEqualTo(newTokenId); assertThat(token.totalSupply()).isEqualTo(1000L); assertThat(token.tokenType()).isEqualTo(TokenType.FUNGIBLE_COMMON); assertThat(token.expiry()) @@ -213,7 +213,7 @@ void handleWorksForFungibleCreateWithSelfDenominatedToken() { assertThat(token.supplyKey()).isEqualTo(A_COMPLEX_KEY); assertThat(token.feeScheduleKey()).isEqualTo(A_COMPLEX_KEY); assertThat(token.autoRenewSecs()).isEqualTo(autoRenewSecs); - assertThat(token.autoRenewAccountNumber()).isEqualTo(autoRenewAccountId.accountNum()); + assertThat(token.autoRenewAccountId()).isEqualTo(autoRenewAccountId); assertThat(token.decimals()).isZero(); assertThat(token.name()).isEqualTo("TestToken"); assertThat(token.symbol()).isEqualTo("TT"); @@ -225,26 +225,26 @@ void handleWorksForFungibleCreateWithSelfDenominatedToken() { assertThat(tokenRel.balance()).isEqualTo(1000L); assertThat(tokenRel.deleted()).isFalse(); - assertThat(tokenRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); - assertThat(tokenRel.accountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(tokenRel.tokenId()).isEqualTo(newTokenId); + assertThat(tokenRel.accountId()).isEqualTo(treasuryId); assertThat(tokenRel.kycGranted()).isFalse(); assertThat(tokenRel.automaticAssociation()).isFalse(); 
assertThat(tokenRel.frozen()).isFalse(); - assertThat(tokenRel.nextToken()).isZero(); - assertThat(tokenRel.previousToken()).isZero(); + assertThat(tokenRel.nextToken()).isNull(); + assertThat(tokenRel.previousToken()).isNull(); assertThat(writableTokenRelStore.get(payerId, newTokenId)).isNotNull(); final var feeCollectorRel = writableTokenRelStore.get(payerId, newTokenId); assertThat(feeCollectorRel.balance()).isZero(); assertThat(feeCollectorRel.deleted()).isFalse(); - assertThat(feeCollectorRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); - assertThat(feeCollectorRel.accountNumber()).isEqualTo(payerId.accountNum()); + assertThat(feeCollectorRel.tokenId()).isEqualTo(newTokenId); + assertThat(feeCollectorRel.accountId()).isEqualTo(payerId); assertThat(feeCollectorRel.kycGranted()).isFalse(); assertThat(feeCollectorRel.automaticAssociation()).isFalse(); assertThat(feeCollectorRel.frozen()).isFalse(); - assertThat(feeCollectorRel.nextToken()).isZero(); - assertThat(feeCollectorRel.previousToken()).isZero(); + assertThat(feeCollectorRel.nextToken()).isNull(); + assertThat(feeCollectorRel.previousToken()).isNull(); } @Test @@ -278,8 +278,8 @@ void failsIfAssociationAlreadyExists() { // Just to simulate existing token association , add to store. Only for testing writableTokenRelStore.put(TokenRelation.newBuilder() - .tokenNumber(newTokenId.tokenNum()) - .accountNumber(treasuryId.accountNum()) + .tokenId(newTokenId) + .accountId(treasuryId) .balance(1000L) .build()); assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNotNull(); @@ -337,8 +337,8 @@ void failsIfAssociationAlreadyExistsWhileAssociatingCollector() { // Just to simulate existing token association , add to store. Only for testing writableTokenRelStore.put(TokenRelation.newBuilder() - .tokenNumber(newTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(newTokenId) + .accountId(payerId) .balance(1000L) .build()); assertThat(writableTokenRelStore.get(payerId, newTokenId)).isNotNull(); @@ -384,8 +384,8 @@ void uniqueSupportedIfNftsEnabled() { assertThat(writableTokenStore.get(newTokenId)).isNotNull(); final var token = writableTokenStore.get(newTokenId); - assertThat(token.treasuryAccountNumber()).isEqualTo(treasuryId.accountNum()); - assertThat(token.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(token.treasuryAccountId()).isEqualTo(treasuryId); + assertThat(token.tokenId()).isEqualTo(newTokenId); assertThat(token.totalSupply()).isZero(); assertThat(token.tokenType()).isEqualTo(TokenType.NON_FUNGIBLE_UNIQUE); assertThat(token.expiry()) @@ -397,7 +397,7 @@ void uniqueSupportedIfNftsEnabled() { assertThat(token.supplyKey()).isEqualTo(A_COMPLEX_KEY); assertThat(token.feeScheduleKey()).isEqualTo(A_COMPLEX_KEY); assertThat(token.autoRenewSecs()).isEqualTo(autoRenewSecs); - assertThat(token.autoRenewAccountNumber()).isEqualTo(autoRenewAccountId.accountNum()); + assertThat(token.autoRenewAccountId()).isEqualTo(autoRenewAccountId); assertThat(token.decimals()).isZero(); assertThat(token.name()).isEqualTo("TestToken"); assertThat(token.symbol()).isEqualTo("TT"); @@ -409,13 +409,13 @@ void uniqueSupportedIfNftsEnabled() { assertThat(tokenRel.balance()).isZero(); assertThat(tokenRel.deleted()).isFalse(); - assertThat(tokenRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); - assertThat(tokenRel.accountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(tokenRel.tokenId()).isEqualTo(newTokenId); + assertThat(tokenRel.accountId()).isEqualTo(treasuryId); 
assertThat(tokenRel.kycGranted()).isFalse(); assertThat(tokenRel.automaticAssociation()).isFalse(); assertThat(tokenRel.frozen()).isFalse(); - assertThat(tokenRel.nextToken()).isZero(); - assertThat(tokenRel.previousToken()).isZero(); + assertThat(tokenRel.nextToken()).isNull(); + assertThat(tokenRel.previousToken()).isNull(); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java index da6689b7a112..cdd34f335498 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java @@ -126,7 +126,7 @@ void rejectsNonexistingToken() { void rejectsDeletedToken() { // Create the token store with a deleted token writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_987_ID.tokenNum()) + .tokenId(TOKEN_987_ID) .deleted(true) .adminKey(DEFAULT_PAYER_KT.asPbjKey()) .build()); @@ -145,7 +145,7 @@ void rejectsDeletedToken() { void rejectsPausedToken() { // Create the token store with a paused token writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_987_ID.tokenNum()) + .tokenId(TOKEN_987_ID) .deleted(false) .paused(true) .adminKey(DEFAULT_PAYER_KT.asPbjKey()) @@ -165,7 +165,7 @@ void rejectsPausedToken() { void rejectsTokenWithoutAdminKey() { // Create the token store with a null admin key writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() - .tokenNumber(TOKEN_987_ID.tokenNum()) + .tokenId(TOKEN_987_ID) .deleted(false) .paused(false) .adminKey((Key) null) // here's the null admin key diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java index 80d3d41820fa..75c7777758ab 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java @@ -25,6 +25,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ID_REPEATED_IN_TOKEN_LIST; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static com.hedera.test.factories.scenarios.TokenDissociateScenarios.TOKEN_DISSOCIATE_WITH_CUSTOM_PAYER_PAID_KNOWN_TARGET; @@ -58,7 +59,6 @@ import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; -import 
com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.handlers.TokenDissociateFromAccountHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; @@ -232,16 +232,14 @@ void rejectsNonexistingTokenRel() { @Test void rejectsPausedToken() { // Create a readable store with a paused token - final var pausedToken = Token.newBuilder() - .tokenNumber(TOKEN_555_ID.tokenNum()) - .paused(true) - .build(); + final var pausedToken = + Token.newBuilder().tokenId(TOKEN_555_ID).paused(true).build(); readableTokenStore = newReadableStoreWithTokens(pausedToken); // Create the token rel for the paused token writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) .build()); // Create the context and transaction @@ -258,15 +256,15 @@ void rejectsPausedToken() { void rejectsTreasuryAccount() { // Create a readable store that has a token with a treasury account final var tokenWithTreasury = Token.newBuilder() - .tokenNumber(TOKEN_555_ID.tokenNum()) - .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenId(TOKEN_555_ID) + .treasuryAccountId(ACCOUNT_1339) .build(); readableTokenStore = newReadableStoreWithTokens(tokenWithTreasury); // Create the token rel writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) .build()); // Create the context and transaction @@ -283,13 +281,13 @@ void rejectsTreasuryAccount() { void rejectsFrozenToken() { // Create the readable store with a token final var tokenWithTreasury = - Token.newBuilder().tokenNumber(TOKEN_555_ID.tokenNum()).build(); + Token.newBuilder().tokenId(TOKEN_555_ID).build(); readableTokenStore = newReadableStoreWithTokens(tokenWithTreasury); // Create the frozen token rel writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) .frozen(true) .build()); @@ -307,15 +305,15 @@ void rejectsFrozenToken() { void rejectsAccountThatStillOwnsNfts() { // Create the readable store with a token that still owns an NFT final var tokenWithTreasury = Token.newBuilder() - .tokenNumber(TOKEN_555_ID.tokenNum()) + .tokenId(TOKEN_555_ID) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) .build(); readableTokenStore = newReadableStoreWithTokens(tokenWithTreasury); // Create the token rel with a non-zero NFT balance writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) .balance(1L) .build()); @@ -346,16 +344,14 @@ void tokenRelForDeletedTokenIsRemoved() { writableAccountStore = newWritableStoreWithAccounts(accountWithTokenRels); // Create the readable token store with a deleted token - final var tokenWithTreasury = Token.newBuilder() - .tokenNumber(TOKEN_555_ID.tokenNum()) - .deleted(true) - .build(); + final var tokenWithTreasury = + Token.newBuilder().tokenId(TOKEN_555_ID).deleted(true).build(); readableTokenStore = newReadableStoreWithTokens(tokenWithTreasury); // Create the token rel for the deleted token writableTokenRelStore.put(TokenRelation.newBuilder() - 
.accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) .build()); // Create the context and transaction @@ -398,8 +394,8 @@ void tokenRelForNonexistingTokenIsRemoved() { // Create the token rel for the nonexistent token writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) .build()); // Create the context and transaction @@ -450,23 +446,23 @@ void tokenRelAndTreasuryTokenRelAreUpdatedForFungible() { // 2. has a treasury account final var totalSupply = 3000L; final var tokenWithTreasury = Token.newBuilder() - .tokenNumber(TOKEN_555_ID.tokenNum()) + .tokenId(TOKEN_555_ID) .tokenType(TokenType.FUNGIBLE_COMMON) - .treasuryAccountNumber(ACCOUNT_2020.accountNumOrThrow()) + .treasuryAccountId(ACCOUNT_2020) .totalSupply(totalSupply) .build(); readableTokenStore = newReadableStoreWithTokens(tokenWithTreasury); // Create the token rel with a non-zero fungible balance writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) .balance(1000) .build()); // Create the treasury token rel writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_2020.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_2020) + .tokenId(TOKEN_555_ID) .balance(2000L) .build()); @@ -511,15 +507,13 @@ void tokenRelAndTreasuryTokenRelAreUpdatedForFungible() { @Test void multipleTokenRelsAreRemoved() { // Represents a token that won't be found - final var token444Id = BaseTokenHandler.asToken(444); + final var token444Id = asToken(444); // Represents a token that is deleted - final var token555 = Token.newBuilder() - .tokenNumber(TOKEN_555_ID.tokenNum()) - .deleted(true) - .build(); + final var token555 = + Token.newBuilder().tokenId(TOKEN_555_ID).deleted(true).build(); // Represents an active token final var token666 = Token.newBuilder() - .tokenNumber(TOKEN_666_ID.tokenNum()) + .tokenId(TOKEN_666_ID) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) .build(); @@ -539,25 +533,25 @@ void multipleTokenRelsAreRemoved() { // Create the token rel for each token writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(token444Id.tokenNum()) - .previousToken(-1) // start of the account's token list - .nextToken(TOKEN_555_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(token444Id) + .previousToken(asToken(-1)) // start of the account's token list + .nextToken(TOKEN_555_ID) .balance(20) .build()); writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_555_ID.tokenNum()) - .previousToken(token444Id.tokenNum()) - .nextToken(TOKEN_666_ID.tokenNum()) + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_555_ID) + .previousToken(token444Id) + .nextToken(TOKEN_666_ID) .balance(30) .automaticAssociation(true) .build()); writableTokenRelStore.put(TokenRelation.newBuilder() - .accountNumber(ACCOUNT_1339.accountNumOrThrow()) - .tokenNumber(TOKEN_666_ID.tokenNum()) - .previousToken(TOKEN_555_ID.tokenNum()) - .nextToken(-1) // end of the account's token list + .accountId(ACCOUNT_1339) + .tokenId(TOKEN_666_ID) + .previousToken(TOKEN_555_ID) + .nextToken((TokenID) null) // end of the account's token list .build()); // Create 
the context and transaction diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFeeScheduleUpdateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFeeScheduleUpdateHandlerTest.java index 7d8d45ffcd94..df9fe0744859 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFeeScheduleUpdateHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFeeScheduleUpdateHandlerTest.java @@ -30,7 +30,6 @@ import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.token.TokenFeeScheduleUpdateTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; @@ -92,7 +91,7 @@ void handleWorksAsExpectedForFungibleToken() { // validate after fee schedule update fixed and fractional custom fees are added to the token assertThat(writableTokenStore.modifiedTokens()).hasSize(1); - assertThat(writableTokenStore.modifiedTokens()).hasSameElementsAs(Set.of(fungibleTokenNum)); + assertThat(writableTokenStore.modifiedTokens()).hasSameElementsAs(Set.of(fungibleTokenId)); final var expectedToken = writableTokenStore.get(fungibleTokenId); assertThat(expectedToken.customFees()).hasSize(2); @@ -103,8 +102,7 @@ void handleWorksAsExpectedForFungibleToken() { @Test @DisplayName("fee schedule update works as expected for non-fungible token") void handleWorksAsExpectedForNonFungibleToken() { - final var tokenId = - TokenID.newBuilder().tokenNum(nonFungibleTokenNum.longValue()).build(); + final var tokenId = nonFungibleTokenId; txn = TransactionBody.newBuilder() .tokenFeeScheduleUpdate(TokenFeeScheduleUpdateTransactionBody.newBuilder() .tokenId(tokenId) @@ -122,7 +120,7 @@ void handleWorksAsExpectedForNonFungibleToken() { // validate after fee schedule update royalty custom fees are added to the token assertThat(writableTokenStore.modifiedTokens()).hasSize(1); - assertThat(writableTokenStore.modifiedTokens()).hasSameElementsAs(Set.of(nonFungibleTokenNum)); + assertThat(writableTokenStore.modifiedTokens()).hasSameElementsAs(Set.of(tokenId)); final var expectedToken = writableTokenStore.get(nonFungibleTokenId); assertThat(expectedToken.customFees()).hasSize(1); @@ -134,10 +132,10 @@ void handleWorksAsExpectedForNonFungibleToken() { void validatesTokenHasFeeScheduleKey() { final var tokenWithoutFeeScheduleKey = fungibleToken.copyBuilder().feeScheduleKey((Key) null).build(); - writableTokenState = MapWritableKVState.builder(TOKENS) - .value(fungibleTokenNum, tokenWithoutFeeScheduleKey) + writableTokenState = MapWritableKVState.builder(TOKENS) + .value(fungibleTokenId, tokenWithoutFeeScheduleKey) .build(); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); writableTokenStore = new WritableTokenStore(writableStates); given(context.writableStore(WritableTokenStore.class)).willReturn(writableTokenStore); @@ -150,7 +148,7 @@ void validatesTokenHasFeeScheduleKey() { @DisplayName("fee schedule update fails if token does not exist") void rejectsInvalidTokenId() { writableTokenState = 
emptyWritableTokenState(); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); writableTokenStore = new WritableTokenStore(writableStates); given(context.writableStore(WritableTokenStore.class)).willReturn(writableTokenStore); @@ -188,9 +186,7 @@ void failsIfTxnHasNoTokenId() { private void givenTxn() { txn = TransactionBody.newBuilder() .tokenFeeScheduleUpdate(TokenFeeScheduleUpdateTransactionBody.newBuilder() - .tokenId(TokenID.newBuilder() - .tokenNum(fungibleTokenNum.longValue()) - .build()) + .tokenId(fungibleTokenId) .customFees(customFees) .build()) .build(); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java index fea9514b60e5..8986fef7ff84 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java @@ -21,6 +21,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static com.hedera.test.factories.scenarios.TokenFreezeScenarios.FREEZE_WITH_NO_KEYS; import static com.hedera.test.factories.scenarios.TokenFreezeScenarios.VALID_FREEZE_WITH_EXTANT_TOKEN; @@ -247,8 +248,8 @@ void tokenRelFreezeSuccessful() { Account.newBuilder().accountNumber(accountNumber).build()); given(tokenRelStore.getForModify(ACCOUNT_13257, token)) .willReturn(TokenRelation.newBuilder() - .tokenNumber(token.tokenNum()) - .accountNumber(accountNumber) + .tokenId(token) + .accountId(ACCOUNT_13257) .build()); final var txn = newFreezeTxn(token); given(context.body()).willReturn(txn); @@ -256,8 +257,8 @@ void tokenRelFreezeSuccessful() { subject.handle(context); verify(tokenRelStore) .put(TokenRelation.newBuilder() - .tokenNumber(token.tokenNum()) - .accountNumber(accountNumber) + .tokenId(token) + .accountId(ACCOUNT_13257) .frozen(true) .build()); } @@ -280,7 +281,7 @@ private ReadableTokenStore.TokenMetadata tokenMetaWithFreezeKey() { private ReadableTokenStore.TokenMetadata tokenMetaWithFreezeKey(Key freezeKey) { return new ReadableTokenStore.TokenMetadata( - null, null, null, freezeKey, null, null, null, null, false, 25L, 2); + null, null, null, freezeKey, null, null, null, null, false, asAccount(25L), 2); } private TransactionBody newFreezeTxn(TokenID token) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java index 92c6d33ab3bd..ec9b1c77c416 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java +++ 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java @@ -35,7 +35,6 @@ import static org.mockito.Mock.Strictness.LENIENT; import static org.mockito.Mockito.when; -import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Duration; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.QueryHeader; @@ -212,10 +211,10 @@ void getsResponseIfOkWithDefaultKey() { final var expectedInfo = getExpectInfoDefaultKeys(); fungibleToken = setFungibleTokenKeys(); - final var state = MapReadableKVState.builder(TOKENS) - .value(fungibleTokenNum, fungibleToken) + final var state = MapReadableKVState.builder(TOKENS) + .value(fungibleTokenId, fungibleToken) .build(); - given(readableStates.get(TOKENS)).willReturn(state); + given(readableStates.get(TOKENS)).willReturn(state); final var store = new ReadableTokenStoreImpl(readableStates); checkResponse(responseHeader, expectedInfo, store); @@ -229,10 +228,10 @@ void getsResponseIfOkWithDefaultStatus() { final var expectedInfo = getExpectInfoDefaultStatus(); fungibleToken = setFungibleTokenDefaultStatus(); - final var state = MapReadableKVState.builder(TOKENS) - .value(fungibleTokenNum, fungibleToken) + final var state = MapReadableKVState.builder(TOKENS) + .value(fungibleTokenId, fungibleToken) .build(); - given(readableStates.get(TOKENS)).willReturn(state); + given(readableStates.get(TOKENS)).willReturn(state); final var store = new ReadableTokenStoreImpl(readableStates); checkResponse(responseHeader, expectedInfo, store); @@ -264,7 +263,7 @@ private TokenInfo getExpectedInfo() { .symbol(fungibleToken.symbol()) .name(fungibleToken.name()) .memo(fungibleToken.memo()) - .treasury(AccountID.newBuilder().accountNum(fungibleToken.treasuryAccountNumber())) + .treasury(fungibleToken.treasuryAccountId()) .totalSupply(fungibleToken.totalSupply()) .maxSupply(fungibleToken.maxSupply()) .decimals(fungibleToken.decimals()) @@ -277,7 +276,7 @@ private TokenInfo getExpectedInfo() { .feeScheduleKey(fungibleToken.feeScheduleKey()) .pauseKey(fungibleToken.pauseKey()) .autoRenewPeriod(Duration.newBuilder().seconds(fungibleToken.autoRenewSecs())) - .autoRenewAccount(AccountID.newBuilder().accountNum(fungibleToken.autoRenewAccountNumber())) + .autoRenewAccount(fungibleToken.autoRenewAccountId()) .defaultFreezeStatus(fungibleToken.accountsFrozenByDefault() ? FROZEN : UNFROZEN) .defaultKycStatus(fungibleToken.accountsKycGrantedByDefault() ? GRANTED : REVOKED) .pauseStatus(fungibleToken.paused() ? 
PAUSED : UNPAUSED) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java index 5c8ce3d3122a..2c387a0909d7 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java @@ -255,7 +255,8 @@ private TokenNftInfo getExpectedInfo() { return TokenNftInfo.newBuilder() .ledgerId(new BytesConverter().convert("0x03")) .nftID(NftID.newBuilder() - .tokenID(TokenID.newBuilder().tokenNum(uniqueTokenIdSl1.tokenTypeNumber())) + .tokenID(TokenID.newBuilder() + .tokenNum(uniqueTokenIdSl1.tokenId().tokenNum())) .serialNumber(uniqueTokenIdSl1.serialNumber())) .accountID(ownerId) .creationTime(consensusTimestamp) @@ -266,9 +267,7 @@ private TokenNftInfo getExpectedInfo() { private Query createTokenGetNftInfoQuery(final UniqueTokenId uniqueTokenId) { final var data = TokenGetNftInfoQuery.newBuilder() - .nftID(NftID.newBuilder() - .tokenID(TokenID.newBuilder().tokenNum(uniqueTokenId.tokenTypeNumber())) - .serialNumber(uniqueTokenId.serialNumber())) + .nftID(NftID.newBuilder().tokenID(uniqueTokenId.tokenId()).serialNumber(uniqueTokenId.serialNumber())) .header(QueryHeader.newBuilder().build()) .build(); @@ -286,9 +285,7 @@ private Query createTokenGetNftInfoQueryInvalidTokenId(final UniqueTokenId uniqu private Query createTokenGetNftInfoQueryInvalidSerialNum(final UniqueTokenId uniqueTokenId) { final var data = TokenGetNftInfoQuery.newBuilder() - .nftID(NftID.newBuilder() - .tokenID(TokenID.newBuilder().tokenNum(uniqueTokenId.tokenTypeNumber())) - .serialNumber(-1L)) + .nftID(NftID.newBuilder().tokenID(uniqueTokenId.tokenId()).serialNumber(-1L)) .header(QueryHeader.newBuilder().build()) .build(); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java index ff915b4fd845..d5805a59c9be 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java @@ -35,6 +35,7 @@ import static org.mockito.Mock.Strictness.LENIENT; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.base.TokenSupplyType; import com.hedera.hapi.node.base.TokenType; import com.hedera.hapi.node.base.TransactionID; @@ -42,7 +43,6 @@ import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.token.TokenGrantKycTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; @@ -122,12 +122,14 @@ void txnHasNoAccount() throws PreCheckException { private ReadableTokenStore 
mockKnownKycTokenStore() { final var tokenNum = KNOWN_TOKEN_WITH_KYC.getTokenNum(); final var storedToken = new Token( - tokenNum, + TokenID.newBuilder() + .tokenNum(KNOWN_TOKEN_WITH_KYC.getTokenNum()) + .build(), "Test_KnownKycToken" + System.currentTimeMillis(), "KYC", 10, 10, - treasury.accountNumOrThrow(), + treasury, null, TOKEN_KYC_KT.asPbjKey(), null, @@ -139,7 +141,7 @@ private ReadableTokenStore mockKnownKycTokenStore() { false, TokenType.FUNGIBLE_COMMON, TokenSupplyType.INFINITE, - -1, + AccountID.newBuilder().accountNum(-1).build(), autoRenewSecs, expirationTime, memo, @@ -148,10 +150,10 @@ private ReadableTokenStore mockKnownKycTokenStore() { false, false, Collections.emptyList()); - final var readableState = MapReadableKVState.builder(TOKENS) - .value(EntityNum.fromLong(tokenNum), storedToken) + final var readableState = MapReadableKVState.builder(TOKENS) + .value(TokenID.newBuilder().tokenNum(tokenNum).build(), storedToken) .build(); - given(readableStates.get(TOKENS)).willReturn(readableState); + given(readableStates.get(TOKENS)).willReturn(readableState); return new ReadableTokenStoreImpl(readableStates); } @@ -229,9 +231,7 @@ void kycGrantedAndPersisted() { } private TokenRelation.Builder newTokenRelationBuilder() { - return TokenRelation.newBuilder() - .tokenNumber(token.tokenNumber()) - .accountNumber(payerId.accountNumOrThrow()); + return TokenRelation.newBuilder().tokenId(token.tokenId()).accountId(payerId); } private TransactionBody newTxnBody(final boolean tokenPresent, final boolean accountPresent) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenPauseHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenPauseHandlerTest.java index 67511c25e459..c01db3eaa37d 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenPauseHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenPauseHandlerTest.java @@ -33,7 +33,6 @@ import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.token.TokenPauseTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; @@ -140,10 +139,10 @@ void preHandleSetsStatusWhenTokenMissing() throws PreCheckException { @Test void doesntAddAnyKeyIfPauseKeyMissing() throws PreCheckException { final var copy = token.copyBuilder().pauseKey(Key.DEFAULT).build(); - readableTokenState = MapReadableKVState.builder(TOKENS) - .value(tokenEntityNum, copy) + readableTokenState = MapReadableKVState.builder(TOKENS) + .value(tokenId, copy) .build(); - given(readableStates.get(TOKENS)).willReturn(readableTokenState); + given(readableStates.get(TOKENS)).willReturn(readableTokenState); readableTokenStore = new ReadableTokenStoreImpl(readableStates); preHandleContext.registerStore(ReadableTokenStore.class, readableTokenStore); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java index b23b8987ffac..c13d08475741 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java @@ -212,8 +212,8 @@ void emptyGetForModifyShouldNotPersist() { @DisplayName("Valid inputs should grant KYC and commit changes") void kycRevokedAndPersisted() { final var stateTokenRel = newTokenRelationBuilder() - .tokenNumber(TOKEN_10.tokenNum()) - .accountNumber(ACCOUNT_100.accountNumOrThrow()) + .tokenId(TOKEN_10) + .accountId(ACCOUNT_100) .kycGranted(true) .build(); given(tokenRelStore.getForModify(ACCOUNT_100, TOKEN_10)).willReturn(stateTokenRel); @@ -228,9 +228,7 @@ void kycRevokedAndPersisted() { } private TokenRelation.Builder newTokenRelationBuilder() { - return TokenRelation.newBuilder() - .tokenNumber(TOKEN_10.tokenNum()) - .accountNumber(ACCOUNT_100.accountNumOrThrow()); + return TokenRelation.newBuilder().tokenId(TOKEN_10).accountId(ACCOUNT_100); } private TransactionBody newTxnBody() { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java index af3e40b3324a..be2f933c43f7 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java @@ -21,6 +21,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.txnFrom; import static com.hedera.node.app.service.token.impl.test.util.MetaAssertion.basicContextAssertions; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; @@ -241,8 +242,8 @@ void tokenRelUnfreezeSuccessful() { Account.newBuilder().accountNumber(accountNumber).build()); given(tokenRelStore.getForModify(ACCOUNT_13257, token)) .willReturn(TokenRelation.newBuilder() - .tokenNumber(token.tokenNum()) - .accountNumber(accountNumber) + .tokenId(token) + .accountId(ACCOUNT_13257) .build()); final var txn = newUnfreezeTxn(token); given(context.body()).willReturn(txn); @@ -250,8 +251,8 @@ void tokenRelUnfreezeSuccessful() { subject.handle(context); verify(tokenRelStore) .put(TokenRelation.newBuilder() - .tokenNumber(token.tokenNum()) - .accountNumber(accountNumber) + .tokenId(token) + .accountId(ACCOUNT_13257) .frozen(false) .build()); } @@ -274,7 +275,7 @@ private ReadableTokenStore.TokenMetadata tokenMetaWithFreezeKey() { private ReadableTokenStore.TokenMetadata tokenMetaWithFreezeKey(Key freezeKey) { return new ReadableTokenStore.TokenMetadata( - null, null, null, freezeKey, null, null, null, null, false, 25L, 2); + null, null, 
null, freezeKey, null, null, null, null, false, asAccount(25L), 2); } private TransactionBody newUnfreezeTxn(TokenID token) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnpauseHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnpauseHandlerTest.java index 58c7bb5b985f..e76340b30b1e 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnpauseHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnpauseHandlerTest.java @@ -32,7 +32,6 @@ import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.token.TokenUnpauseTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; @@ -141,10 +140,10 @@ void preHandleSetsStatusWhenTokenMissing() throws PreCheckException { @Test void doesntAddAnyKeyIfPauseKeyMissing() throws PreCheckException { final var copy = token.copyBuilder().pauseKey(Key.DEFAULT).build(); - readableTokenState = MapReadableKVState.builder(TOKENS) - .value(tokenEntityNum, copy) + readableTokenState = MapReadableKVState.builder(TOKENS) + .value(tokenId, copy) .build(); - given(readableStates.get(TOKENS)).willReturn(readableTokenState); + given(readableStates.get(TOKENS)).willReturn(readableTokenState); readableTokenStore = new ReadableTokenStoreImpl(readableStates); preHandleContext.registerStore(ReadableTokenStore.class, readableTokenStore); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java index bbb8ed5c4a83..f9ec148b3234 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java @@ -131,7 +131,7 @@ void happyPathForFungibleTokenUpdate() { final var token = readableTokenStore.get(fungibleTokenId); assertThat(token.symbol()).isEqualTo(fungibleToken.symbol()); assertThat(token.name()).isEqualTo(fungibleToken.name()); - assertThat(token.treasuryAccountNumber()).isEqualTo(fungibleToken.treasuryAccountNumber()); + assertThat(token.treasuryAccountId()).isEqualTo(fungibleToken.treasuryAccountId()); assertThat(token.adminKey()).isEqualTo(fungibleToken.adminKey()); assertThat(token.supplyKey()).isEqualTo(fungibleToken.supplyKey()); assertThat(token.kycKey()).isEqualTo(fungibleToken.kycKey()); @@ -139,7 +139,7 @@ void happyPathForFungibleTokenUpdate() { assertThat(token.wipeKey()).isEqualTo(fungibleToken.wipeKey()); assertThat(token.feeScheduleKey()).isEqualTo(fungibleToken.feeScheduleKey()); assertThat(token.pauseKey()).isEqualTo(fungibleToken.pauseKey()); - assertThat(token.autoRenewAccountNumber()).isEqualTo(fungibleToken.autoRenewAccountNumber()); + assertThat(token.autoRenewAccountId()).isEqualTo(fungibleToken.autoRenewAccountId()); 
assertThat(token.expiry()).isEqualTo(fungibleToken.expiry()); assertThat(token.memo()).isEqualTo(fungibleToken.memo()); assertThat(token.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); @@ -150,7 +150,7 @@ void happyPathForFungibleTokenUpdate() { final var modifiedToken = writableTokenStore.get(fungibleTokenId); assertThat(modifiedToken.symbol()).isEqualTo("TTT"); assertThat(modifiedToken.name()).isEqualTo("TestToken1"); - assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.treasuryAccountId()).isEqualTo(ownerId); assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.kycKey()).isEqualTo(B_COMPLEX_KEY); @@ -158,7 +158,7 @@ void happyPathForFungibleTokenUpdate() { assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); - assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.autoRenewAccountId()).isEqualTo(ownerId); assertThat(modifiedToken.expiry()).isEqualTo(1234600L); assertThat(modifiedToken.memo()).isEqualTo("test token1"); assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); @@ -173,7 +173,7 @@ void happyPathForNonFungibleTokenUpdate() { final var token = readableTokenStore.get(nonFungibleTokenId); assertThat(token.symbol()).isEqualTo(nonFungibleToken.symbol()); assertThat(token.name()).isEqualTo(nonFungibleToken.name()); - assertThat(token.treasuryAccountNumber()).isEqualTo(nonFungibleToken.treasuryAccountNumber()); + assertThat(token.treasuryAccountId()).isEqualTo(nonFungibleToken.treasuryAccountId()); assertThat(token.adminKey()).isEqualTo(nonFungibleToken.adminKey()); assertThat(token.supplyKey()).isEqualTo(nonFungibleToken.supplyKey()); assertThat(token.kycKey()).isEqualTo(nonFungibleToken.kycKey()); @@ -181,7 +181,7 @@ void happyPathForNonFungibleTokenUpdate() { assertThat(token.wipeKey()).isEqualTo(nonFungibleToken.wipeKey()); assertThat(token.feeScheduleKey()).isEqualTo(nonFungibleToken.feeScheduleKey()); assertThat(token.pauseKey()).isEqualTo(nonFungibleToken.pauseKey()); - assertThat(token.autoRenewAccountNumber()).isEqualTo(nonFungibleToken.autoRenewAccountNumber()); + assertThat(token.autoRenewAccountId()).isEqualTo(nonFungibleToken.autoRenewAccountId()); assertThat(token.expiry()).isEqualTo(nonFungibleToken.expiry()); assertThat(token.memo()).isEqualTo(nonFungibleToken.memo()); assertThat(token.autoRenewSecs()).isEqualTo(nonFungibleToken.autoRenewSecs()); @@ -192,7 +192,7 @@ void happyPathForNonFungibleTokenUpdate() { final var modifiedToken = writableTokenStore.get(fungibleTokenId); assertThat(modifiedToken.symbol()).isEqualTo("TTT"); assertThat(modifiedToken.name()).isEqualTo("TestToken1"); - assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.treasuryAccountId()).isEqualTo(ownerId); assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.kycKey()).isEqualTo(B_COMPLEX_KEY); @@ -200,7 +200,7 @@ void happyPathForNonFungibleTokenUpdate() { assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); - 
assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.autoRenewAccountId()).isEqualTo(ownerId); assertThat(modifiedToken.expiry()).isEqualTo(1234600L); assertThat(modifiedToken.memo()).isEqualTo("test token1"); assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); @@ -363,8 +363,8 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailable() { .build(); given(handleContext.body()).willReturn(txn); writableTokenRelStore.remove(TokenRelation.newBuilder() - .tokenNumber(fungibleTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(fungibleTokenId) + .accountId(payerId) .build()); given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); @@ -373,7 +373,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailable() { final var token = readableTokenStore.get(fungibleTokenId); assertThat(token.symbol()).isEqualTo(fungibleToken.symbol()); assertThat(token.name()).isEqualTo(fungibleToken.name()); - assertThat(token.treasuryAccountNumber()).isEqualTo(fungibleToken.treasuryAccountNumber()); + assertThat(token.treasuryAccountId()).isEqualTo(fungibleToken.treasuryAccountId()); assertThat(token.adminKey()).isEqualTo(fungibleToken.adminKey()); assertThat(token.supplyKey()).isEqualTo(fungibleToken.supplyKey()); assertThat(token.kycKey()).isEqualTo(fungibleToken.kycKey()); @@ -381,7 +381,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailable() { assertThat(token.wipeKey()).isEqualTo(fungibleToken.wipeKey()); assertThat(token.feeScheduleKey()).isEqualTo(fungibleToken.feeScheduleKey()); assertThat(token.pauseKey()).isEqualTo(fungibleToken.pauseKey()); - assertThat(token.autoRenewAccountNumber()).isEqualTo(fungibleToken.autoRenewAccountNumber()); + assertThat(token.autoRenewAccountId()).isEqualTo(fungibleToken.autoRenewAccountId()); assertThat(token.expiry()).isEqualTo(fungibleToken.expiry()); assertThat(token.memo()).isEqualTo(fungibleToken.memo()); assertThat(token.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); @@ -395,7 +395,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailable() { final var modifiedToken = writableTokenStore.get(fungibleTokenId); assertThat(modifiedToken.symbol()).isEqualTo("TTT"); assertThat(modifiedToken.name()).isEqualTo("TestToken1"); - assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum()); + assertThat(modifiedToken.treasuryAccountId()).isEqualTo(payerId); assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.kycKey()).isEqualTo(B_COMPLEX_KEY); @@ -403,7 +403,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailable() { assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); - assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.autoRenewAccountId()).isEqualTo(ownerId); assertThat(modifiedToken.expiry()).isEqualTo(1234600L); assertThat(modifiedToken.memo()).isEqualTo("test token1"); assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); @@ -421,8 +421,8 @@ void 
worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailableForNFT() { .build(); given(handleContext.body()).willReturn(txn); writableTokenRelStore.remove(TokenRelation.newBuilder() - .tokenNumber(nonFungibleTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(nonFungibleTokenId) + .accountId(payerId) .build()); given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); @@ -431,7 +431,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailableForNFT() { final var token = readableTokenStore.get(nonFungibleTokenId); assertThat(token.symbol()).isEqualTo(nonFungibleToken.symbol()); assertThat(token.name()).isEqualTo(nonFungibleToken.name()); - assertThat(token.treasuryAccountNumber()).isEqualTo(nonFungibleToken.treasuryAccountNumber()); + assertThat(token.treasuryAccountId()).isEqualTo(nonFungibleToken.treasuryAccountId()); assertThat(token.adminKey()).isEqualTo(nonFungibleToken.adminKey()); assertThat(token.supplyKey()).isEqualTo(nonFungibleToken.supplyKey()); assertThat(token.kycKey()).isEqualTo(nonFungibleToken.kycKey()); @@ -439,7 +439,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailableForNFT() { assertThat(token.wipeKey()).isEqualTo(nonFungibleToken.wipeKey()); assertThat(token.feeScheduleKey()).isEqualTo(nonFungibleToken.feeScheduleKey()); assertThat(token.pauseKey()).isEqualTo(nonFungibleToken.pauseKey()); - assertThat(token.autoRenewAccountNumber()).isEqualTo(nonFungibleToken.autoRenewAccountNumber()); + assertThat(token.autoRenewAccountId()).isEqualTo(nonFungibleToken.autoRenewAccountId()); assertThat(token.expiry()).isEqualTo(nonFungibleToken.expiry()); assertThat(token.memo()).isEqualTo(nonFungibleToken.memo()); assertThat(token.autoRenewSecs()).isEqualTo(nonFungibleToken.autoRenewSecs()); @@ -463,7 +463,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailableForNFT() { final var modifiedToken = writableTokenStore.get(nonFungibleTokenId); assertThat(modifiedToken.symbol()).isEqualTo("TTT"); assertThat(modifiedToken.name()).isEqualTo("TestToken1"); - assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum()); + assertThat(modifiedToken.treasuryAccountId()).isEqualTo(payerId); assertThat(rel.balance()).isEqualTo(1); assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); @@ -472,7 +472,7 @@ void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailableForNFT() { assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); - assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.autoRenewAccountId()).isEqualTo(ownerId); assertThat(modifiedToken.expiry()).isEqualTo(1234600L); assertThat(modifiedToken.memo()).isEqualTo("test token1"); assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); @@ -495,8 +495,8 @@ void failsIfNoAutoAssociationsAvailableForNewUnassociatedTreasury() { .build(); given(handleContext.body()).willReturn(txn); writableTokenRelStore.remove(TokenRelation.newBuilder() - .tokenNumber(fungibleTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(fungibleTokenId) + .accountId(payerId) .build()); writableAccountStore.put(account.copyBuilder() 
.maxAutoAssociations(0) @@ -533,8 +533,8 @@ void failsOnDetachedNewTreasury() { .build()); given(handleContext.body()).willReturn(txn); writableTokenRelStore.remove(TokenRelation.newBuilder() - .tokenNumber(fungibleTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(fungibleTokenId) + .accountId(payerId) .build()); given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); @@ -574,8 +574,8 @@ void failsOnDetachedNewAutoRenewAccount() { .build()); given(handleContext.body()).willReturn(txn); writableTokenRelStore.remove(TokenRelation.newBuilder() - .tokenNumber(fungibleTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(fungibleTokenId) + .accountId(payerId) .build()); given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); @@ -738,8 +738,8 @@ void followsHappyPathWithNewTreasuryAndZeroBalanceOldTreasury() { .build(); given(handleContext.body()).willReturn(txn); writableTokenRelStore.remove(TokenRelation.newBuilder() - .tokenNumber(fungibleTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(fungibleTokenId) + .accountId(payerId) .build()); writableAccountStore.put(account.copyBuilder().numberPositiveBalances(0).build()); given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); @@ -756,13 +756,13 @@ void followsHappyPathWithNewTreasuryAndZeroBalanceOldTreasury() { final var modifiedToken = writableTokenStore.get(fungibleTokenId); assertThat(modifiedToken.symbol()).isEqualTo("TTT"); assertThat(modifiedToken.name()).isEqualTo("TestToken1"); - assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum()); + assertThat(modifiedToken.treasuryAccountId()).isEqualTo(payerId); assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); - assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.autoRenewAccountId()).isEqualTo(ownerId); assertThat(modifiedToken.expiry()).isEqualTo(1234600L); assertThat(modifiedToken.memo()).isEqualTo("test token1"); assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); @@ -779,8 +779,8 @@ void doesntGrantKycOrUnfreezeNewTreasuryIfNoKeyIsPresent() { .build(); given(handleContext.body()).willReturn(txn); writableTokenRelStore.remove(TokenRelation.newBuilder() - .tokenNumber(fungibleTokenId.tokenNum()) - .accountNumber(payerId.accountNum()) + .tokenId(fungibleTokenId) + .accountId(payerId) .build()); given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); @@ -802,13 +802,13 @@ void doesntGrantKycOrUnfreezeNewTreasuryIfNoKeyIsPresent() { final var modifiedToken = writableTokenStore.get(fungibleTokenId); assertThat(modifiedToken.symbol()).isEqualTo("TTT"); assertThat(modifiedToken.name()).isEqualTo("TestToken1"); - 
assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum());
+        assertThat(modifiedToken.treasuryAccountId()).isEqualTo(payerId);
         assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY);
         assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY);
         assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY);
         assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY);
         assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY);
-        assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum());
+        assertThat(modifiedToken.autoRenewAccountId()).isEqualTo(ownerId);
         assertThat(modifiedToken.expiry()).isEqualTo(1234600L);
         assertThat(modifiedToken.memo()).isEqualTo("test token1");
         assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs());
diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AccountAmountUtils.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AccountAmountUtils.java
new file mode 100644
index 000000000000..213e5201e202
--- /dev/null
+++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AccountAmountUtils.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.hedera.node.app.service.token.impl.test.handlers.transfer;
+
+import com.hedera.hapi.node.base.AccountAmount;
+import com.hedera.hapi.node.base.AccountID;
+import com.hedera.hapi.node.base.NftTransfer;
+import com.hedera.pbj.runtime.io.buffer.Bytes;
+
+public class AccountAmountUtils {
+    public static AccountAmount aaWith(AccountID account, long amount) {
+        return AccountAmount.newBuilder().accountID(account).amount(amount).build();
+    }
+
+    public static AccountAmount aaWithAllowance(AccountID account, long amount) {
+        return AccountAmount.newBuilder()
+                .accountID(account)
+                .amount(amount)
+                .isApproval(true)
+                .build();
+    }
+
+    public static AccountID asAccountWithAlias(Bytes alias) {
+        return AccountID.newBuilder().alias(alias).build();
+    }
+
+    public static NftTransfer nftTransferWith(AccountID from, AccountID to, long serialNo) {
+        return NftTransfer.newBuilder()
+                .senderAccountID(from)
+                .receiverAccountID(to)
+                .serialNumber(serialNo)
+                .build();
+    }
+
+    public static AccountAmount aaAlias(final Bytes alias, final long amount) {
+        return AccountAmount.newBuilder()
+                .amount(amount)
+                .accountID(AccountID.newBuilder().alias(alias).build())
+                .build();
+    }
+}
diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AutoAccountCreatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AutoAccountCreatorTest.java
new file mode 100644
index 000000000000..1fbedfcd7dcb
--- /dev/null
+++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AutoAccountCreatorTest.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.hedera.node.app.service.token.impl.test.handlers.transfer;
+
+import static com.hedera.hapi.node.base.ResponseCodeEnum.MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED;
+import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount;
+import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.BDDMockito.given;
+
+import com.hedera.node.app.service.token.impl.WritableAccountStore;
+import com.hedera.node.app.service.token.impl.handlers.transfer.AutoAccountCreator;
+import com.hedera.node.app.service.token.impl.handlers.transfer.TransferContextImpl;
+import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder;
+import com.hedera.node.app.spi.workflows.HandleException;
+import com.hedera.node.config.testfixtures.HederaTestConfigBuilder;
+import com.hedera.pbj.runtime.io.buffer.Bytes;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+class AutoAccountCreatorTest extends StepsBase {
+    private AutoAccountCreator subject;
+
+    @BeforeEach
+    public void setUp() {
+        super.setUp();
+        givenTxn();
+        refreshWritableStores();
+        givenStoresAndConfig(handleContext);
+
+        transferContext = new TransferContextImpl(handleContext);
+        subject = new AutoAccountCreator(handleContext);
+    }
+
+    @Test
+    void refusesToCreateBeyondMaxNumber() {
+        configuration = HederaTestConfigBuilder.create()
+                .withValue("accounts.maxNumber", 2)
+                .getOrCreateConfig();
+        given(handleContext.configuration()).willReturn(configuration);
+        transferContext = new TransferContextImpl(handleContext);
+        assertThatThrownBy(() -> subject.create(alias.alias(), false))
+                .isInstanceOf(HandleException.class)
+                .has(responseCode(MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED));
+    }
+
+    @Test
+    // TODO: In end to end tests need to validate other fields set correctly on auto created accounts
+    void happyPathECKeyAliasWorks() {
+        given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class)))
+                .will((invocation) -> {
+                    final var copy =
+                            account.copyBuilder().accountNumber(createdNumber).build();
+                    writableAccountStore.put(copy);
+                    writableAliases.put(ecKeyAlias, asAccount(createdNumber));
+                    return recordBuilder.accountID(asAccount(createdNumber));
+                });
+        given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore);
+
+        assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(2);
+        assertThat(writableAccountStore.modifiedAccountsInState()).isEmpty();
+        assertThat(writableAccountStore.get(asAccount(createdNumber))).isNull();
+        assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNull();
+        assertThat(writableAliases.get(ecKeyAlias)).isNull();
+
+        subject.create(ecKeyAlias, false);
+
+        assertThat(writableAccountStore.modifiedAliasesInState()).hasSize(1);
+        assertThat(writableAccountStore.modifiedAccountsInState()).hasSize(1);
+        assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(3);
+        assertThat(writableAccountStore.get(asAccount(createdNumber))).isNotNull();
+
assertThat(writableAliases.get(ecKeyAlias).accountNum()).isEqualTo(createdNumber); + } + + @Test + // TODO: In end to end tests need to validate other fields set correctly on auto created accounts + void happyPathEDKeyAliasWorks() { + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(2); + assertThat(writableAccountStore.modifiedAccountsInState()).isEmpty(); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNull(); + assertThat(writableAliases.get(edKeyAlias)).isNull(); + + subject.create(edKeyAlias, false); + + assertThat(writableAccountStore.modifiedAliasesInState()).hasSize(1); + assertThat(writableAccountStore.modifiedAccountsInState()).hasSize(1); + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(3); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNotNull(); + assertThat(writableAliases.get(edKeyAlias).accountNum()).isEqualTo(createdNumber); + } + + @Test + // TODO: In end to end tests need to validate other fields set on auto created accounts + void happyPathWithHollowAccountAliasInHbarTransfersWorks() { + final var address = Bytes.wrap(evmAddress); + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(address, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(2); + assertThat(writableAccountStore.modifiedAccountsInState()).isEmpty(); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNull(); + assertThat(writableAliases.get(address)).isNull(); + + subject.create(address, false); + + assertThat(writableAccountStore.modifiedAliasesInState()).hasSize(1); + assertThat(writableAccountStore.modifiedAccountsInState()).hasSize(1); + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(3); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNotNull(); + assertThat(writableAliases.get(address).accountNum()).isEqualTo(createdNumber); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/EnsureAliasesStepTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/EnsureAliasesStepTest.java new file mode 100644 index 000000000000..cfb34d8e856b --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/EnsureAliasesStepTest.java @@ -0,0 +1,392 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 
2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers.transfer; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.aaAlias; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.aaWith; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.asAccountWithAlias; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.nftTransferWith; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.swirlds.common.utility.CommonUtils.unhex; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.BDDMockito.given; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.NftTransfer; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.handlers.transfer.EnsureAliasesStep; +import com.hedera.node.app.service.token.impl.handlers.transfer.TransferContextImpl; +import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class EnsureAliasesStepTest extends StepsBase { + @BeforeEach + public void setUp() { + super.setUp(); + givenTxn(); + givenStoresAndConfig(handleContext); + ensureAliasesStep = new EnsureAliasesStep(body); + transferContext = new TransferContextImpl(handleContext); + } + + @Test + void autoCreatesAccounts() { + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(ecKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, 
asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(2); + assertThat(writableAccountStore.modifiedAccountsInState()).isEmpty(); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNull(); + assertThat(writableAliases.get(ecKeyAlias)).isNull(); + assertThat(writableAliases.get(edKeyAlias)).isNull(); + + ensureAliasesStep.doIn(transferContext); + + assertThat(writableAccountStore.modifiedAliasesInState()).hasSize(2); + assertThat(writableAccountStore.modifiedAccountsInState()).hasSize(2); + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(4); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNotNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNotNull(); + assertThat(writableAliases.get(ecKeyAlias).accountNum()).isEqualTo(createdNumber); + assertThat(writableAliases.get(edKeyAlias).accountNum()).isEqualTo(createdNumber + 1); + + assertThat(transferContext.numOfAutoCreations()).isEqualTo(2); + assertThat(transferContext.numOfLazyCreations()).isZero(); + assertThat(transferContext.resolutions()).containsKey(edKeyAlias); + assertThat(transferContext.resolutions()).containsKey(ecKeyAlias); + } + + @Test + void autoCreateEvmAddressesAccounts() { + final var evmAddressAlias1 = Bytes.wrap(unhex("0000000000000000000000000000000000000004")); + final var evmAddressAlias2 = Bytes.wrap(unhex("0000000000000000000000000000000000000005")); + final var evmAddressAlias3 = Bytes.wrap(unhex("0000000000000000000000000000000000000002")); + body = CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder() + .accountAmounts(aaWith(ownerId, -1_000), aaAlias(evmAddressAlias1, +1_000)) + .build()) + .tokenTransfers( + TokenTransferList.newBuilder() + .token(fungibleTokenId) + .transfers(List.of(aaWith(ownerId, -1_000), aaAlias(evmAddressAlias2, +1_000))) + .build(), + TokenTransferList.newBuilder() + .token(nonFungibleTokenId) + .nftTransfers(nftTransferWith(ownerId, asAccountWithAlias(evmAddressAlias3), 1)) + .build()) + .build(); + givenTxn(body); + + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber) + .alias(evmAddressAlias1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(evmAddressAlias1, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .alias(evmAddressAlias2) + .build(); + writableAccountStore.put(copy); + writableAliases.put(evmAddressAlias2, asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 2) + .alias(evmAddressAlias3) + .build(); + writableAccountStore.put(copy); + writableAliases.put(evmAddressAlias3, asAccount(createdNumber + 2)); + return recordBuilder.accountID(asAccount(createdNumber + 2)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + ensureAliasesStep = new EnsureAliasesStep(body); + + 
ensureAliasesStep.doIn(transferContext); + + assertThat(writableAccountStore.modifiedAliasesInState()).hasSize(3); + assertThat(writableAccountStore.modifiedAccountsInState()).hasSize(3); + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(5); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNotNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNotNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 2))).isNotNull(); + assertThat(writableAliases.get(evmAddressAlias1).accountNum()).isEqualTo(createdNumber); + assertThat(writableAliases.get(evmAddressAlias2).accountNum()).isEqualTo(createdNumber + 1); + assertThat(writableAliases.get(evmAddressAlias3).accountNum()).isEqualTo(createdNumber + 2); + + assertThat(transferContext.numOfAutoCreations()).isZero(); + assertThat(transferContext.numOfLazyCreations()).isEqualTo(3); + assertThat(transferContext.resolutions()).containsKey(evmAddressAlias1); + assertThat(transferContext.resolutions()).containsKey(evmAddressAlias2); + assertThat(transferContext.resolutions()).containsKey(evmAddressAlias3); + } + + @Test + void resolvedExistingAliases() { + // insert aliases into state + setUpInsertingKnownAliasesToState(); + + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(2); + assertThat(writableAccountStore.get(unknownAliasedId)).isNotNull(); + assertThat(writableAccountStore.get(unknownAliasedId1)).isNotNull(); + + ensureAliasesStep.doIn(transferContext); + + assertThat(writableAccountStore.modifiedAliasesInState()).isEmpty(); + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(2); + assertThat(writableAliases.get(ecKeyAlias).accountNum()).isEqualTo(createdNumber); + assertThat(writableAliases.get(edKeyAlias).accountNum()).isEqualTo(createdNumber + 1); + + assertThat(transferContext.numOfAutoCreations()).isZero(); + assertThat(transferContext.numOfLazyCreations()).isZero(); + assertThat(transferContext.resolutions()).containsKey(edKeyAlias); + assertThat(transferContext.resolutions()).containsKey(ecKeyAlias); + } + + @Test + void failsOnRepeatedAliasesInTokenTransferList() { + body = CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder() + .accountAmounts(aaWith(ownerId, -1_000), aaWith(unknownAliasedId, +1_000)) + .build()) + .tokenTransfers( + TokenTransferList.newBuilder() + .token(fungibleTokenId) + .transfers(List.of( + aaWith(ownerId, -1_000), + aaWith(unknownAliasedId1, +1_000), + aaWith(ownerId, -1_000), + aaWith(unknownAliasedId1, +1_000))) + .build(), + TokenTransferList.newBuilder() + .token(nonFungibleTokenId) + .nftTransfers(nftTransferWith(ownerId, unknownAliasedId1, 1)) + .build()) + .build(); + txn = asTxn(body); + given(handleContext.body()).willReturn(txn); + ensureAliasesStep = new EnsureAliasesStep(body); + transferContext = new TransferContextImpl(handleContext); + + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(ecKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); 
+ }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> ensureAliasesStep.doIn(transferContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ResponseCodeEnum.INVALID_ALIAS_KEY)); + } + + @Test + void failsOnRepeatedAliasesInHbarTransferList() { + body = CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder() + .accountAmounts( + aaWith(ownerId, -1_000), + aaWith(unknownAliasedId, +1_000), + aaWith(ownerId, -1_000), + aaWith(unknownAliasedId, +1_000)) + .build()) + .tokenTransfers() + .build(); + txn = asTxn(body); + given(handleContext.body()).willReturn(txn); + ensureAliasesStep = new EnsureAliasesStep(body); + transferContext = new TransferContextImpl(handleContext); + + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(ecKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> ensureAliasesStep.doIn(transferContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ResponseCodeEnum.ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS)); + } + + @Test + void resolvesMirrorAddressInHbarList() { + final var mirrorAdjust = aaAlias(mirrorAlias, +100); + body = CryptoTransferTransactionBody.newBuilder() + .transfers( + TransferList.newBuilder().accountAmounts(mirrorAdjust).build()) + .build(); + txn = asTxn(body); + given(handleContext.body()).willReturn(txn); + ensureAliasesStep = new EnsureAliasesStep(body); + transferContext = new TransferContextImpl(handleContext); + + ensureAliasesStep.doIn(transferContext); + + assertThat(transferContext.resolutions()).containsEntry(mirrorAlias, payerId); + assertThat(transferContext.numOfLazyCreations()).isZero(); + } + + @Test + void resolvesMirrorAddressInNftTransfer() { + body = CryptoTransferTransactionBody.newBuilder() + .tokenTransfers(TokenTransferList.newBuilder() + .token(nonFungibleTokenId) + .nftTransfers(NftTransfer.newBuilder() + .receiverAccountID(AccountID.newBuilder() + .alias(mirrorAlias) + .build()) + .senderAccountID(payerId) + .serialNumber(1) + .build()) + .build()) + .build(); + txn = asTxn(body); + given(handleContext.body()).willReturn(txn); + ensureAliasesStep = new EnsureAliasesStep(body); + transferContext = new TransferContextImpl(handleContext); + + ensureAliasesStep.doIn(transferContext); + + assertThat(transferContext.resolutions()).containsEntry(mirrorAlias, payerId); + assertThat(transferContext.numOfLazyCreations()).isZero(); + } + + private void setUpInsertingKnownAliasesToState() { + final var readableBuilder = emptyReadableAliasStateBuilder(); + readableBuilder.value(ecKeyAlias, asAccount(createdNumber)); + readableBuilder.value(edKeyAlias, asAccount(createdNumber + 1)); + readableAliases = readableBuilder.build(); + + final var writableBuilder = emptyWritableAliasStateBuilder(); + writableBuilder.value(ecKeyAlias, asAccount(createdNumber)); + 
writableBuilder.value(edKeyAlias, asAccount(createdNumber + 1)); + writableAliases = writableBuilder.build(); + + given(writableStates.get(ALIASES)).willReturn(writableAliases); + writableAccountStore = new WritableAccountStore(writableStates); + + writableAccountStore.put(account.copyBuilder() + .accountNumber(createdNumber) + .alias(ecKeyAlias) + .build()); + writableAccountStore.put(account.copyBuilder() + .accountNumber(createdNumber + 1) + .alias(edKeyAlias) + .build()); + + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + transferContext = new TransferContextImpl(handleContext); + } + + @Test + void doesntAutoCreateWhenTransferToAliasFeatureDisabled() { + configuration = HederaTestConfigBuilder.create() + .withValue("autoCreation.enabled", false) + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + transferContext = new TransferContextImpl(handleContext); + assertThatThrownBy(() -> ensureAliasesStep.doIn(transferContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void doesntAutoCreateWhenTokenTransferToAliasFeatureDisabled() { + configuration = HederaTestConfigBuilder.create() + .withValue("tokens.autoCreations.isEnabled", false) + .getOrCreateConfig(); + body = CryptoTransferTransactionBody.newBuilder() + .tokenTransfers(TokenTransferList.newBuilder() + .token(fungibleTokenId) + .transfers(List.of(aaWith(ownerId, -1_000), aaWith(unknownAliasedId1, +1_000))) + .build()) + .build(); + txn = asTxn(body); + given(handleContext.body()).willReturn(txn); + given(handleContext.configuration()).willReturn(configuration); + + ensureAliasesStep = new EnsureAliasesStep(body); + transferContext = new TransferContextImpl(handleContext); + + assertThatThrownBy(() -> ensureAliasesStep.doIn(transferContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/ReplaceAliasesWithIDsInOpTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/ReplaceAliasesWithIDsInOpTest.java new file mode 100644 index 000000000000..1e206d80dfc3 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/ReplaceAliasesWithIDsInOpTest.java @@ -0,0 +1,106 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers.transfer; + +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.aaWith; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.nftTransferWith; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.BDDMockito.given; + +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.node.app.records.SingleTransactionRecordBuilder; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.handlers.transfer.EnsureAliasesStep; +import com.hedera.node.app.service.token.impl.handlers.transfer.ReplaceAliasesWithIDsInOp; +import com.hedera.node.app.service.token.impl.handlers.transfer.TransferContextImpl; +import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class ReplaceAliasesWithIDsInOpTest extends StepsBase { + @BeforeEach + public void setUp() { + super.setUp(); + recordBuilder = new SingleTransactionRecordBuilder(consensusInstant); + givenTxn(); + refreshWritableStores(); + givenStoresAndConfig(handleContext); + ensureAliasesStep = new EnsureAliasesStep(body); + replaceAliasesWithIDsInOp = new ReplaceAliasesWithIDsInOp(); + transferContext = new TransferContextImpl(handleContext); + } + + @Test + void replacesAliasesInOp() { + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .will((invocation) -> { + final var copy = + account.copyBuilder().accountNumber(createdNumber).build(); + writableAccountStore.put(copy); + writableAliases.put(ecKeyAlias, asAccount(createdNumber)); + return recordBuilder.accountID(asAccount(createdNumber)); + }) + .will((invocation) -> { + final var copy = account.copyBuilder() + .accountNumber(createdNumber + 1) + .build(); + writableAccountStore.put(copy); + writableAliases.put(edKeyAlias, asAccount(createdNumber + 1)); + return recordBuilder.accountID(asAccount(createdNumber + 1)); + }); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + ensureAliasesStep.doIn(transferContext); + + assertThat(writableAccountStore.modifiedAliasesInState()).hasSize(2); + assertThat(writableAccountStore.modifiedAccountsInState()).hasSize(2); + assertThat(writableAccountStore.sizeOfAliasesState()).isEqualTo(4); + assertThat(writableAccountStore.get(asAccount(createdNumber))).isNotNull(); + assertThat(writableAccountStore.get(asAccount(createdNumber + 1))).isNotNull(); + assertThat(writableAliases.get(ecKeyAlias).accountNum()).isEqualTo(createdNumber); + assertThat(writableAliases.get(edKeyAlias).accountNum()).isEqualTo(createdNumber + 1); + + assertThat(transferContext.numOfAutoCreations()).isEqualTo(2); + assertThat(transferContext.numOfLazyCreations()).isZero(); + assertThat(transferContext.resolutions()).containsKey(edKeyAlias); + assertThat(transferContext.resolutions()).containsKey(ecKeyAlias); + + final var replacedOp = replaceAliasesWithIDsInOp.replaceAliasesWithIds(body, transferContext); + + final var 
expectedOp = CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder() + .accountAmounts(aaWith(ownerId, -1_000), aaWith(asAccount(createdNumber), +1_000)) + .build()) + .tokenTransfers( + TokenTransferList.newBuilder() + .token(fungibleTokenId) + .transfers( + List.of(aaWith(ownerId, -1_000), aaWith(asAccount(createdNumber + 1), +1_000))) + .build(), + TokenTransferList.newBuilder() + .token(nonFungibleTokenId) + .nftTransfers(nftTransferWith(ownerId, asAccount(createdNumber + 1), 1)) + .build()) + .build(); + assertThat(replacedOp).isEqualTo(expectedOp); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/StepsBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/StepsBase.java new file mode 100644 index 000000000000..44ee5bba8f41 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/StepsBase.java @@ -0,0 +1,134 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers.transfer; + +import static com.hedera.node.app.service.mono.pbj.PbjConverter.asBytes; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.aaWith; +import static com.hedera.node.app.service.token.impl.test.handlers.transfer.AccountAmountUtils.nftTransferWith; +import static com.swirlds.common.utility.CommonUtils.unhex; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.BDDMockito.given; + +import com.google.common.primitives.Longs; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.TokenTransferList; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.base.TransferList; +import com.hedera.hapi.node.token.CryptoTransferTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.records.SingleTransactionRecordBuilder; +import com.hedera.node.app.service.token.impl.handlers.transfer.EnsureAliasesStep; +import com.hedera.node.app.service.token.impl.handlers.transfer.ReplaceAliasesWithIDsInOp; +import com.hedera.node.app.service.token.impl.handlers.transfer.TransferContextImpl; +import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; +import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; +import com.hedera.node.app.spi.validation.ExpiryValidator; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.util.Arrays; +import java.util.List; +import org.bouncycastle.util.encoders.Hex; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import 
org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class StepsBase extends CryptoTokenHandlerTestBase { + @Mock(strictness = Mock.Strictness.LENIENT) + protected HandleContext handleContext; + + @Mock + protected ExpiryValidator expiryValidator; + + protected EnsureAliasesStep ensureAliasesStep; + protected ReplaceAliasesWithIDsInOp replaceAliasesWithIDsInOp; + protected CryptoTransferTransactionBody body; + protected TransactionBody txn; + protected TransferContextImpl transferContext; + protected SingleTransactionRecordBuilder recordBuilder; + + @BeforeEach + public void setUp() { + super.setUp(); + recordBuilder = new SingleTransactionRecordBuilder(consensusInstant); + refreshWritableStores(); + } + + protected final AccountID unknownAliasedId = + AccountID.newBuilder().alias(ecKeyAlias).build(); + protected final AccountID unknownAliasedId1 = + AccountID.newBuilder().alias(edKeyAlias).build(); + + protected static final Key aPrimitiveKey = Key.newBuilder() + .ed25519(Bytes.wrap("01234567890123456789012345678911")) + .build(); + protected static final Bytes edKeyAlias = Bytes.wrap(asBytes(Key.PROTOBUF, aPrimitiveKey)); + protected static final byte[] ecdsaKeyBytes = + Hex.decode("3a21033a514176466fa815ed481ffad09110a2d344f6c9b78c1d14afc351c3a51be33d"); + protected static final Bytes ecKeyAlias = Bytes.wrap(ecdsaKeyBytes); + + protected static final byte[] evmAddress = unhex("0000000000000000000000000000000000000003"); + protected static final byte[] create2Address = unhex("0111111111111111111111111111111111defbbb"); + protected static final Bytes mirrorAlias = Bytes.wrap(evmAddress); + protected static final Bytes create2Alias = Bytes.wrap(create2Address); + protected static final Long mirrorNum = Longs.fromByteArray(Arrays.copyOfRange(evmAddress, 12, 20)); + protected final int createdNumber = 10000000; + + protected TransactionBody asTxn(final CryptoTransferTransactionBody body) { + return TransactionBody.newBuilder() + .transactionID(TransactionID.newBuilder() + .accountID(payerId) + .transactionValidStart(consensusTimestamp) + .build()) + .cryptoTransfer(body) + .build(); + } + + protected void givenTxn() { + body = CryptoTransferTransactionBody.newBuilder() + .transfers(TransferList.newBuilder() + .accountAmounts(aaWith(ownerId, -1_000), aaWith(unknownAliasedId, +1_000)) + .build()) + .tokenTransfers( + TokenTransferList.newBuilder() + .token(fungibleTokenId) + .transfers(List.of(aaWith(ownerId, -1_000), aaWith(unknownAliasedId1, +1_000))) + .build(), + TokenTransferList.newBuilder() + .token(nonFungibleTokenId) + .nftTransfers(nftTransferWith(ownerId, unknownAliasedId1, 1)) + .build()) + .build(); + givenTxn(body); + } + + protected void givenTxn(CryptoTransferTransactionBody txnBody) { + body = txnBody; + txn = asTxn(body); + given(handleContext.body()).willReturn(txn); + given(handleContext.configuration()).willReturn(configuration); + given(handleContext.expiryValidator()).willReturn(expiryValidator); + given(handleContext.dispatchRemovableChildTransaction(any(), eq(CryptoCreateRecordBuilder.class))) + .willReturn(recordBuilder); + transferContext = new TransferContextImpl(handleContext); + // given(handleContext.feeCalculator()).willReturn(fees); + // given(fees.computePayment(any(), any())).willReturn(new FeeObject(100, 100, 100)); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/TransferStepTest.java 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/TransferStepTest.java new file mode 100644 index 000000000000..44d98ace23a3 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/TransferStepTest.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers.transfer; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.hedera.node.app.service.token.impl.handlers.transfer.TransferStep; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TransferStepTest extends StepsBase { + private TransferStep transferStep = transferContext -> { + throw new UnsupportedOperationException(); + }; + + @Test + void assertKeysEmpty() { + assertThat(transferStep.authorizingKeysIn(transferContext)).isEmpty(); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/AdapterUtils.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/AdapterUtils.java index 427ee8ee942e..6a11e0ec4298 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/AdapterUtils.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/AdapterUtils.java @@ -18,7 +18,7 @@ import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; import static com.hedera.node.app.service.mono.utils.EntityNum.MISSING_NUM; -import static com.hedera.node.app.service.mono.utils.EntityNum.fromAccountId; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.CURRENTLY_UNUSED_ALIAS; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.FIRST_TOKEN_SENDER; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.FIRST_TOKEN_SENDER_LITERAL_ALIAS; @@ -31,7 +31,6 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.mono.state.migration.HederaAccount; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; @@ -40,6 +39,7 @@ import com.hedera.node.app.spi.state.ReadableStates; import com.hedera.node.app.spi.state.WritableKVState; import com.hedera.node.app.spi.state.WritableStates; +import com.hedera.pbj.runtime.io.buffer.Bytes; import 
com.hedera.test.factories.scenarios.TxnHandlingScenario; import com.hedera.test.utils.StateKeyAdapter; import com.hedera.test.utils.TestFixturesKeyLookup; @@ -86,18 +86,12 @@ public static WritableStates mockWritableStates(final Map new EntityNum(id.accountNumOrThrow().intValue())); } - public static WritableKVState wellKnownAliasState() { - final Map wellKnownAliases = Map.ofEntries( - Map.entry(CURRENTLY_UNUSED_ALIAS, new EntityNumValue(MISSING_NUM.longValue())), - Map.entry( - NO_RECEIVER_SIG_ALIAS, - new EntityNumValue(fromAccountId(NO_RECEIVER_SIG).longValue())), - Map.entry( - RECEIVER_SIG_ALIAS, - new EntityNumValue(fromAccountId(RECEIVER_SIG).longValue())), - Map.entry( - FIRST_TOKEN_SENDER_LITERAL_ALIAS.toStringUtf8(), - new EntityNumValue(fromAccountId(FIRST_TOKEN_SENDER).longValue()))); + public static MapWritableKVState wellKnownAliasState() { + final Map wellKnownAliases = Map.ofEntries( + Map.entry(Bytes.wrap(CURRENTLY_UNUSED_ALIAS), asAccount(MISSING_NUM.longValue())), + Map.entry(Bytes.wrap(NO_RECEIVER_SIG_ALIAS), toPbj(NO_RECEIVER_SIG)), + Map.entry(Bytes.wrap(RECEIVER_SIG_ALIAS), toPbj(RECEIVER_SIG)), + Map.entry(Bytes.wrap(FIRST_TOKEN_SENDER_LITERAL_ALIAS.toByteArray()), toPbj(FIRST_TOKEN_SENDER))); return new MapWritableKVState<>(ALIASES_KEY, wellKnownAliases); } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoHandlerTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoHandlerTestBase.java index b7b73e6a8c19..192904a8b035 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoHandlerTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoHandlerTestBase.java @@ -17,6 +17,8 @@ package com.hedera.node.app.service.token.impl.test.handlers.util; import static com.hedera.node.app.service.mono.Utils.asHederaKey; +import static com.hedera.node.app.service.mono.pbj.PbjConverter.asBytes; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; import static com.hedera.test.utils.KeyUtils.B_COMPLEX_KEY; import static com.hedera.test.utils.KeyUtils.C_COMPLEX_KEY; @@ -32,7 +34,6 @@ import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.token.CryptoAllowance; import com.hedera.hapi.node.token.TokenAllowance; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.impl.CryptoSignatureWaiversImpl; import com.hedera.node.app.service.token.impl.ReadableAccountStoreImpl; @@ -67,14 +68,18 @@ public class CryptoHandlerTestBase { protected final Key accountKey = A_COMPLEX_KEY; protected final HederaKey accountHederaKey = asHederaKey(accountKey).get(); protected final Long accountNum = id.accountNum(); - protected final AccountID alias = - AccountID.newBuilder().alias(Bytes.wrap("testAlias")).build(); - protected final byte[] evmAddress = CommonUtils.unhex("6aea3773ea468a814d954e6dec795bfee7d76e25"); + + private static final Key aPrimitiveKey = Key.newBuilder() + .ed25519(Bytes.wrap("01234567890123456789012345678901")) + .build(); + private static final Bytes edKeyAlias = Bytes.wrap(asBytes(Key.PROTOBUF, aPrimitiveKey)); + protected final AccountID 
alias = AccountID.newBuilder().alias(edKeyAlias).build(); + protected final byte[] evmAddress = CommonUtils.unhex("6aea3773ea468a814d954e6dec795bfee7d76e26"); protected final ContractID contractAlias = ContractID.newBuilder().evmAddress(Bytes.wrap(evmAddress)).build(); + /*Contracts */ protected final ContractID contract = ContractID.newBuilder().contractNum(1234).build(); - protected final AccountID deleteAccountId = AccountID.newBuilder().accountNum(3213).build(); protected final AccountID transferAccountId = @@ -102,10 +107,10 @@ public class CryptoHandlerTestBase { .build(); protected static final long defaultAutoRenewPeriod = 7200000L; protected static final long payerBalance = 10_000L; - protected MapReadableKVState readableAliases; + protected MapReadableKVState readableAliases; protected MapReadableKVState readableAccounts; - protected MapWritableKVState writableAliases; + protected MapWritableKVState writableAliases; protected MapWritableKVState writableAccounts; protected Account account; protected ReadableAccountStore readableStore; @@ -152,9 +157,9 @@ protected void resetStores() { readableAliases = emptyReadableAliasStateBuilder().build(); writableAliases = emptyWritableAliasStateBuilder().build(); given(readableStates.get(ACCOUNTS)).willReturn(readableAccounts); - given(readableStates.get(ALIASES)).willReturn(readableAliases); + given(readableStates.get(ALIASES)).willReturn(readableAliases); given(writableStates.get(ACCOUNTS)).willReturn(writableAccounts); - given(writableStates.get(ALIASES)).willReturn(writableAliases); + given(writableStates.get(ALIASES)).willReturn(writableAliases); readableStore = new ReadableAccountStoreImpl(readableStates); writableStore = new WritableAccountStore(writableStates); } @@ -165,7 +170,7 @@ protected void refreshStoresWithCurrentTokenOnlyInReadable() { readableAliases = readableAliasState(); writableAliases = emptyWritableAliasStateBuilder().build(); given(readableStates.get(ACCOUNTS)).willReturn(readableAccounts); - given(readableStates.get(ALIASES)).willReturn(readableAliases); + given(readableStates.get(ALIASES)).willReturn(readableAliases); readableStore = new ReadableAccountStoreImpl(readableStates); writableStore = new WritableAccountStore(writableStates); } @@ -176,9 +181,9 @@ protected void refreshStoresWithCurrentTokenInWritable() { readableAliases = readableAliasState(); writableAliases = writableAliasesStateWithOneKey(); given(readableStates.get(ACCOUNTS)).willReturn(readableAccounts); - given(readableStates.get(ALIASES)).willReturn(readableAliases); + given(readableStates.get(ALIASES)).willReturn(readableAliases); given(writableStates.get(ACCOUNTS)).willReturn(writableAccounts); - given(writableStates.get(ALIASES)).willReturn(writableAliases); + given(writableStates.get(ALIASES)).willReturn(writableAliases); readableStore = new ReadableAccountStoreImpl(readableStates); writableStore = new WritableAccountStore(writableStates); } @@ -202,18 +207,18 @@ protected MapReadableKVState readableAccountState() { } @NonNull - protected MapWritableKVState writableAliasesStateWithOneKey() { + protected MapWritableKVState writableAliasesStateWithOneKey() { return emptyWritableAliasStateBuilder() - .value(alias.toString(), new EntityNumValue(accountNum)) - .value(contractAlias.toString(), new EntityNumValue(contract.contractNum())) + .value(alias.alias(), asAccount(accountNum)) + .value(contractAlias.evmAddress(), asAccount(contract.contractNum())) .build(); } @NonNull - protected MapReadableKVState readableAliasState() { + protected 
MapReadableKVState readableAliasState() { return emptyReadableAliasStateBuilder() - .value(alias.toString(), new EntityNumValue(accountNum)) - .value(contractAlias.toString(), new EntityNumValue(contract.contractNum())) + .value(alias.alias(), asAccount(accountNum)) + .value(contractAlias.evmAddress(), asAccount(contract.contractNum())) .build(); } @@ -228,12 +233,12 @@ protected MapWritableKVState.Builder emptyWritableAccountSta } @NonNull - protected MapWritableKVState.Builder emptyWritableAliasStateBuilder() { + protected MapWritableKVState.Builder emptyWritableAliasStateBuilder() { return MapWritableKVState.builder(ALIASES); } @NonNull - protected MapReadableKVState.Builder emptyReadableAliasStateBuilder() { + protected MapReadableKVState.Builder emptyReadableAliasStateBuilder() { return MapReadableKVState.builder(ALIASES); } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java index a0cb45faccb3..fd0f00a34633 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java @@ -16,6 +16,9 @@ package com.hedera.node.app.service.token.impl.test.handlers.util; +import static com.hedera.node.app.service.mono.pbj.PbjConverter.asBytes; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; import static com.hedera.test.utils.KeyUtils.B_COMPLEX_KEY; import static com.hedera.test.utils.KeyUtils.C_COMPLEX_KEY; @@ -30,6 +33,7 @@ import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.base.TokenSupplyType; import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.common.UniqueTokenId; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.AccountApprovalForAllAllowance; @@ -46,9 +50,6 @@ import com.hedera.hapi.node.transaction.FractionalFee; import com.hedera.hapi.node.transaction.RoyaltyFee; import com.hedera.node.app.config.VersionedConfigImpl; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableNftStore; import com.hedera.node.app.service.token.ReadableTokenRelationStore; @@ -118,8 +119,11 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected final Long accountNum = payerId.accountNum(); /* ---------- Aliases ---------- */ - protected final AccountID alias = - AccountID.newBuilder().alias(Bytes.wrap("testAlias")).build(); + private static final Key aPrimitiveKey = Key.newBuilder() + .ed25519(Bytes.wrap("01234567890123456789012345678901")) + .build(); + private static final Bytes edKeyAlias = Bytes.wrap(asBytes(Key.PROTOBUF, aPrimitiveKey)); + protected final AccountID alias = AccountID.newBuilder().alias(edKeyAlias).build(); protected final 
byte[] evmAddress = CommonUtils.unhex("6aea3773ea468a814d954e6dec795bfee7d76e25"); protected final ContractID contractAlias = ContractID.newBuilder().evmAddress(Bytes.wrap(evmAddress)).build(); @@ -127,31 +131,41 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected final ContractID contract = ContractID.newBuilder().contractNum(1234).build(); /* ---------- Tokens ---------- */ - protected final EntityNum fungibleTokenNum = EntityNum.fromLong(1L); - protected final TokenID fungibleTokenId = - TokenID.newBuilder().tokenNum(fungibleTokenNum.longValue()).build(); - protected final EntityNum nonFungibleTokenNum = EntityNum.fromLong(2L); - protected final TokenID nonFungibleTokenId = - TokenID.newBuilder().tokenNum(nonFungibleTokenNum.longValue()).build(); - protected final EntityNumPair fungiblePair = - EntityNumPair.fromLongs(accountNum.longValue(), fungibleTokenNum.longValue()); - protected final EntityNumPair nonFungiblePair = - EntityNumPair.fromLongs(accountNum.longValue(), nonFungibleTokenNum.longValue()); - protected final EntityNumPair ownerFTPair = - EntityNumPair.fromLongs(ownerId.accountNum(), fungibleTokenNum.longValue()); - protected final EntityNumPair ownerNFTPair = - EntityNumPair.fromLongs(ownerId.accountNum(), nonFungibleTokenNum.longValue()); - - protected final EntityNumPair treasuryFTPair = - EntityNumPair.fromLongs(treasuryId.accountNum(), fungibleTokenNum.longValue()); - protected final EntityNumPair treasuryNFTPair = - EntityNumPair.fromLongs(treasuryId.accountNum(), nonFungibleTokenNum.longValue()); + protected final TokenID fungibleTokenId = asToken(1L); + + protected final TokenID nonFungibleTokenId = asToken(2L); + + protected final EntityIDPair fungiblePair = EntityIDPair.newBuilder() + .accountId(payerId) + .tokenId(fungibleTokenId) + .build(); + protected final EntityIDPair nonFungiblePair = EntityIDPair.newBuilder() + .accountId(payerId) + .tokenId(nonFungibleTokenId) + .build(); + protected final EntityIDPair ownerFTPair = EntityIDPair.newBuilder() + .accountId(ownerId) + .tokenId(fungibleTokenId) + .build(); + protected final EntityIDPair ownerNFTPair = EntityIDPair.newBuilder() + .accountId(ownerId) + .tokenId(nonFungibleTokenId) + .build(); + + protected final EntityIDPair treasuryFTPair = EntityIDPair.newBuilder() + .accountId(treasuryId) + .tokenId(fungibleTokenId) + .build(); + protected final EntityIDPair treasuryNFTPair = EntityIDPair.newBuilder() + .accountId(treasuryId) + .tokenId(nonFungibleTokenId) + .build(); protected final UniqueTokenId uniqueTokenIdSl1 = UniqueTokenId.newBuilder() - .tokenTypeNumber(nonFungibleTokenId.tokenNum()) + .tokenId(nonFungibleTokenId) .serialNumber(1L) .build(); protected final UniqueTokenId uniqueTokenIdSl2 = UniqueTokenId.newBuilder() - .tokenTypeNumber(nonFungibleTokenId.tokenNum()) + .tokenId(nonFungibleTokenId) .serialNumber(2L) .build(); @@ -211,14 +225,14 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected final long autoRenewSecs = 100L; protected static final long payerBalance = 10_000L; /* ---------- States ---------- */ - protected MapReadableKVState readableAliases; + protected MapReadableKVState readableAliases; protected MapReadableKVState readableAccounts; - protected MapWritableKVState writableAliases; + protected MapWritableKVState writableAliases; protected MapWritableKVState writableAccounts; - protected MapReadableKVState readableTokenState; - protected MapWritableKVState writableTokenState; - protected MapReadableKVState readableTokenRelState; - 
protected MapWritableKVState writableTokenRelState; + protected MapReadableKVState readableTokenState; + protected MapWritableKVState writableTokenState; + protected MapReadableKVState readableTokenRelState; + protected MapWritableKVState writableTokenRelState; protected MapReadableKVState readableNftState; protected MapWritableKVState writableNftState; @@ -257,8 +271,8 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { private Map accountsMap; private Map aliasesMap; - private Map tokensMap; - private Map tokenRelsMap; + private Map tokensMap; + private Map tokenRelsMap; @Mock protected ReadableStates readableStates; @@ -291,10 +305,12 @@ private void setUpAllEntities() { accountsMap.put(treasuryId, treasuryAccount); tokensMap = new HashMap<>(); - tokensMap.put(fungibleTokenNum, fungibleToken); - tokensMap.put(nonFungibleTokenNum, nonFungibleToken); + tokensMap.put(fungibleTokenId, fungibleToken); + tokensMap.put(nonFungibleTokenId, nonFungibleToken); aliasesMap = new HashMap<>(); + aliasesMap.put(alias.alias(), payerId); + aliasesMap.put(contractAlias.evmAddress(), asAccount(contract.contractNum())); tokenRelsMap = new HashMap<>(); tokenRelsMap.put(fungiblePair, fungibleTokenRelation); @@ -329,7 +345,7 @@ private void givenAccountsInReadableStore() { readableAliases = readableAliasState(); writableAliases = emptyWritableAliasStateBuilder().build(); given(readableStates.get(ACCOUNTS)).willReturn(readableAccounts); - given(readableStates.get(ALIASES)).willReturn(readableAliases); + given(readableStates.get(ALIASES)).willReturn(readableAliases); readableAccountStore = new ReadableAccountStoreImpl(readableStates); writableAccountStore = new WritableAccountStore(writableStates); } @@ -340,9 +356,9 @@ private void givenAccountsInWritableStore() { readableAliases = readableAliasState(); writableAliases = writableAliasesState(); given(readableStates.get(ACCOUNTS)).willReturn(readableAccounts); - given(readableStates.get(ALIASES)).willReturn(readableAliases); + given(readableStates.get(ALIASES)).willReturn(readableAliases); given(writableStates.get(ACCOUNTS)).willReturn(writableAccounts); - given(writableStates.get(ALIASES)).willReturn(writableAliases); + given(writableStates.get(ALIASES)).willReturn(writableAliases); readableAccountStore = new ReadableAccountStoreImpl(readableStates); writableAccountStore = new WritableAccountStore(writableStates); } @@ -350,8 +366,8 @@ private void givenAccountsInWritableStore() { private void givenTokensInReadableStore() { readableTokenState = readableTokenState(); writableTokenState = emptyWritableTokenState(); - given(readableStates.get(TOKENS)).willReturn(readableTokenState); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(readableStates.get(TOKENS)).willReturn(readableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); readableTokenStore = new ReadableTokenStoreImpl(readableStates); writableTokenStore = new WritableTokenStore(writableStates); } @@ -359,21 +375,21 @@ private void givenTokensInReadableStore() { private void givenTokensInWritableStore() { readableTokenState = readableTokenState(); writableTokenState = writableTokenState(); - given(readableStates.get(TOKENS)).willReturn(readableTokenState); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(readableStates.get(TOKENS)).willReturn(readableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); readableTokenStore = new ReadableTokenStoreImpl(readableStates); 
writableTokenStore = new WritableTokenStore(writableStates); } private void givenReadableTokenRelsStore() { readableTokenRelState = readableTokenRelState(); - given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); } private void givenWritableTokenRelsStore() { writableTokenRelState = writableTokenRelState(); - given(writableStates.get(TOKEN_RELS)).willReturn(writableTokenRelState); + given(writableStates.get(TOKEN_RELS)).willReturn(writableTokenRelState); writableTokenRelStore = new WritableTokenRelationStore(writableStates); } @@ -413,7 +429,7 @@ protected MapReadableKVState readableAccountState() { return builder.build(); } - private MapWritableKVState writableTokenRelState() { + private MapWritableKVState writableTokenRelState() { final var builder = emptyWritableTokenRelsStateBuilder(); for (final var entry : tokenRelsMap.entrySet()) { builder.value(entry.getKey(), entry.getValue()); @@ -421,7 +437,7 @@ private MapWritableKVState writableTokenRelState() return builder.build(); } - private MapReadableKVState readableTokenRelState() { + private MapReadableKVState readableTokenRelState() { final var builder = emptyReadableTokenRelsStateBuilder(); for (final var entry : tokenRelsMap.entrySet()) { builder.value(entry.getKey(), entry.getValue()); @@ -430,23 +446,25 @@ private MapReadableKVState readableTokenRelState() } @NonNull - protected MapWritableKVState writableAliasesState() { - return emptyWritableAliasStateBuilder() - .value(alias.toString(), new EntityNumValue(accountNum)) - .value(contractAlias.toString(), new EntityNumValue(contract.contractNum())) - .build(); + protected MapWritableKVState writableAliasesState() { + final var builder = emptyWritableAliasStateBuilder(); + for (final var entry : aliasesMap.entrySet()) { + builder.value(entry.getKey(), entry.getValue()); + } + return builder.build(); } @NonNull - protected MapReadableKVState readableAliasState() { - return emptyReadableAliasStateBuilder() - .value(alias.toString(), new EntityNumValue(accountNum)) - .value(contractAlias.toString(), new EntityNumValue(contract.contractNum())) - .build(); + protected MapReadableKVState readableAliasState() { + final var builder = emptyReadableAliasStateBuilder(); + for (final var entry : aliasesMap.entrySet()) { + builder.value(entry.getKey(), entry.getValue()); + } + return builder.build(); } @NonNull - protected MapWritableKVState writableTokenState() { + protected MapWritableKVState writableTokenState() { final var builder = emptyWritableTokenStateBuilder(); for (final var entry : tokensMap.entrySet()) { builder.value(entry.getKey(), entry.getValue()); @@ -455,7 +473,7 @@ protected MapWritableKVState writableTokenState() { } @NonNull - protected MapReadableKVState readableTokenState() { + protected MapReadableKVState readableTokenState() { final var builder = emptyReadableTokenStateBuilder(); for (final var entry : tokensMap.entrySet()) { builder.value(entry.getKey(), entry.getValue()); @@ -466,21 +484,15 @@ protected MapReadableKVState readableTokenState() { private void givenValidTokenRelations() { fungibleTokenRelation = givenFungibleTokenRelation(); nonFungibleTokenRelation = givenNonFungibleTokenRelation(); - ownerFTRelation = givenFungibleTokenRelation() - .copyBuilder() - .accountNumber(ownerId.accountNum()) - .build(); - ownerNFTRelation = givenNonFungibleTokenRelation() - .copyBuilder() - 
.accountNumber(ownerId.accountNum()) - .build(); - treasuryFTRelation = givenFungibleTokenRelation() - .copyBuilder() - .accountNumber(treasuryId.accountNum()) - .build(); + ownerFTRelation = + givenFungibleTokenRelation().copyBuilder().accountId(ownerId).build(); + ownerNFTRelation = + givenNonFungibleTokenRelation().copyBuilder().accountId(ownerId).build(); + treasuryFTRelation = + givenFungibleTokenRelation().copyBuilder().accountId(treasuryId).build(); treasuryNFTRelation = givenNonFungibleTokenRelation() .copyBuilder() - .accountNumber(treasuryId.accountNum()) + .accountId(treasuryId) .build(); } @@ -511,7 +523,7 @@ private void givenValidAccounts() { .amount(100) .build()) .approveForAllNftAllowances(AccountApprovalForAllAllowance.newBuilder() - .tokenNum(nonFungibleTokenNum.longValue()) + .tokenNum(nonFungibleTokenId.tokenNum()) .spenderNum(spenderId.accountNum()) .build()) .key(ownerKey) @@ -528,26 +540,26 @@ private void givenValidAccounts() { } protected Token givenValidFungibleToken() { - return givenValidFungibleToken(spenderId.accountNum()); + return givenValidFungibleToken(spenderId); } - protected Token givenValidFungibleToken(long autoRenewAccountNumber) { - return givenValidFungibleToken(autoRenewAccountNumber, false, false, false, false); + protected Token givenValidFungibleToken(AccountID autoRenewAccountId) { + return givenValidFungibleToken(autoRenewAccountId, false, false, false, false); } protected Token givenValidFungibleToken( - long autoRenewAccountNumber, + AccountID autoRenewAccountId, boolean deleted, boolean paused, boolean accountsFrozenByDefault, boolean accountsKycGrantedByDefault) { return new Token( - fungibleTokenId.tokenNum(), + fungibleTokenId, tokenName, tokenSymbol, 1000, 1000, - treasuryId.accountNum(), + treasuryId, adminKey, kycKey, freezeKey, @@ -559,7 +571,7 @@ protected Token givenValidFungibleToken( deleted, TokenType.FUNGIBLE_COMMON, TokenSupplyType.FINITE, - autoRenewAccountNumber, + autoRenewAccountId, autoRenewSecs, expirationTime, memo, @@ -574,8 +586,8 @@ protected Token givenValidNonFungibleToken() { givenValidFungibleToken(); return fungibleToken .copyBuilder() - .tokenNumber(nonFungibleTokenNum.longValue()) - .treasuryAccountNumber(treasuryId.accountNum()) + .tokenId(nonFungibleTokenId) + .treasuryAccountId(treasuryId) .customFees(List.of()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) .build(); @@ -619,29 +631,29 @@ protected Account givenValidAccount() { protected TokenRelation givenFungibleTokenRelation() { return TokenRelation.newBuilder() - .tokenNumber(fungibleTokenId.tokenNum()) - .accountNumber(accountNum) + .tokenId(fungibleTokenId) + .accountId(payerId) .balance(1000L) .frozen(false) .kycGranted(true) .deleted(false) .automaticAssociation(true) - .nextToken(2L) - .previousToken(3L) + .nextToken(asToken(2L)) + .previousToken(asToken(3L)) .build(); } protected TokenRelation givenNonFungibleTokenRelation() { return TokenRelation.newBuilder() - .tokenNumber(nonFungibleTokenNum.longValue()) - .accountNumber(accountNum) + .tokenId(nonFungibleTokenId) + .accountId(payerId) .balance(1) .frozen(false) .kycGranted(true) .deleted(false) .automaticAssociation(true) - .nextToken(2L) - .previousToken(3L) + .nextToken(asToken(2L)) + .previousToken(asToken(3L)) .build(); } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java 
index 21ffd77fd6e5..ba8c91896593 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java @@ -16,7 +16,6 @@ package com.hedera.node.app.service.token.impl.test.handlers.util; -import static com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbj; import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ALIASES_KEY; @@ -24,14 +23,13 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.common.UniqueTokenId; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Nft; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; @@ -41,7 +39,6 @@ import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; -import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.test.util.SigReqAdapterUtils; import com.hedera.node.app.spi.fixtures.state.MapReadableStates; import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; @@ -74,10 +71,10 @@ protected TransactionBody txnFrom(final TxnHandlingScenario scenario) { } } - private MapWritableKVState newTokenStateFromTokens(Token... tokens) { - final var backingMap = new HashMap(); + private MapWritableKVState newTokenStateFromTokens(Token... tokens) { + final var backingMap = new HashMap(); for (final Token token : tokens) { - backingMap.put(EntityNum.fromTokenId(fromPbj(BaseTokenHandler.asToken(token.tokenNumber()))), token); + backingMap.put(token.tokenId(), token); } return new MapWritableKVState<>(TOKENS_KEY, backingMap); @@ -105,9 +102,14 @@ protected WritableAccountStore newWritableStoreWithAccounts(Account... accounts) } protected WritableTokenRelationStore newWritableStoreWithTokenRels(final TokenRelation... 
tokenRels) { - final var backingMap = new HashMap(); + final var backingMap = new HashMap(); for (final TokenRelation tokenRel : tokenRels) { - backingMap.put(EntityNumPair.fromLongs(tokenRel.accountNumber(), tokenRel.tokenNumber()), tokenRel); + backingMap.put( + EntityIDPair.newBuilder() + .accountId(tokenRel.accountId()) + .tokenId(tokenRel.tokenId()) + .build(), + tokenRel); } final var wrappingState = new MapWritableKVState<>(ACCOUNTS_KEY, backingMap); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/StateBuilderUtil.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/StateBuilderUtil.java index e6730c5cae63..3fe225518d45 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/StateBuilderUtil.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/StateBuilderUtil.java @@ -17,16 +17,16 @@ package com.hedera.node.app.service.token.impl.test.handlers.util; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.common.UniqueTokenId; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Nft; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; -import com.hedera.node.app.service.mono.state.virtual.EntityNumValue; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; +import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; public class StateBuilderUtil { @@ -48,12 +48,12 @@ protected MapWritableKVState.Builder emptyWritableAccountSta } @NonNull - protected MapReadableKVState.Builder emptyReadableTokenRelsStateBuilder() { + protected MapReadableKVState.Builder emptyReadableTokenRelsStateBuilder() { return MapReadableKVState.builder(TOKEN_RELS); } @NonNull - protected MapWritableKVState.Builder emptyWritableTokenRelsStateBuilder() { + protected MapWritableKVState.Builder emptyWritableTokenRelsStateBuilder() { return MapWritableKVState.builder(TOKEN_RELS); } @@ -68,27 +68,27 @@ protected MapWritableKVState.Builder emptyWritableNftStateBu } @NonNull - protected MapReadableKVState.Builder emptyReadableTokenStateBuilder() { + protected MapReadableKVState.Builder emptyReadableTokenStateBuilder() { return MapReadableKVState.builder(TOKENS); } @NonNull - protected MapWritableKVState.Builder emptyWritableTokenStateBuilder() { + protected MapWritableKVState.Builder emptyWritableTokenStateBuilder() { return MapWritableKVState.builder(TOKENS); } @NonNull - protected MapWritableKVState.Builder emptyWritableAliasStateBuilder() { + protected MapWritableKVState.Builder emptyWritableAliasStateBuilder() { return MapWritableKVState.builder(ALIASES); } @NonNull - protected MapReadableKVState.Builder emptyReadableAliasStateBuilder() { + protected MapReadableKVState.Builder emptyReadableAliasStateBuilder() { return MapReadableKVState.builder(ALIASES); } @NonNull - protected MapWritableKVState emptyWritableTokenState() { - return MapWritableKVState.builder(TOKENS).build(); + protected MapWritableKVState 
emptyWritableTokenState() { + return MapWritableKVState.builder(TOKENS).build(); } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/TokenHandlerTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/TokenHandlerTestBase.java index 840384125cc4..ace4e0b96288 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/TokenHandlerTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/TokenHandlerTestBase.java @@ -18,6 +18,7 @@ import static com.hedera.node.app.service.mono.Utils.asHederaKey; import static com.hedera.node.app.service.mono.pbj.PbjConverter.protoToPbj; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.test.utils.IdUtils.asAccount; import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; import static com.hedera.test.utils.KeyUtils.B_COMPLEX_KEY; @@ -38,10 +39,10 @@ import com.hedera.hapi.node.transaction.FixedFee; import com.hedera.hapi.node.transaction.FractionalFee; import com.hedera.hapi.node.transaction.RoyaltyFee; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; import com.hedera.node.app.spi.key.HederaKey; @@ -79,17 +80,14 @@ public class TokenHandlerTestBase { protected final HederaKey freezeHederaKey = asHederaKey(freezeKey).get(); protected final HederaKey feeScheduleHederaKey = asHederaKey(feeScheduleKey).get(); protected final HederaKey pauseHederaKey = asHederaKey(A_COMPLEX_KEY).get(); - protected final EntityNum tokenEntityNum = EntityNum.fromLong(1L); - protected final TokenID tokenId = - TokenID.newBuilder().tokenNum(tokenEntityNum.longValue()).build(); + protected final TokenID tokenId = asToken(1L); protected final String tokenName = "test token"; protected final String tokenSymbol = "TT"; protected final Duration WELL_KNOWN_AUTO_RENEW_PERIOD = Duration.newBuilder().seconds(100).build(); protected final Timestamp WELL_KNOWN_EXPIRY = Timestamp.newBuilder().seconds(1_234_567L).build(); - protected final TokenID WELL_KNOWN_TOKEN_ID = - TokenID.newBuilder().tokenNum(tokenEntityNum.longValue()).build(); + protected final TokenID WELL_KNOWN_TOKEN_ID = tokenId; protected final String memo = "test memo"; protected final long expirationTime = 1_234_567L; protected final long sequenceNumber = 1L; @@ -127,8 +125,8 @@ public class TokenHandlerTestBase { @Mock protected WritableStates writableStates; - protected MapReadableKVState readableTokenState; - protected MapWritableKVState writableTokenState; + protected MapReadableKVState readableTokenState; + protected MapWritableKVState writableTokenState; protected ReadableTokenStore readableTokenStore; protected WritableTokenStore writableTokenStore; @@ -142,8 +140,8 @@ public void commonSetUp() { protected void refreshStoresWithCurrentTokenOnlyInReadable() { readableTokenState = readableTokenState(); writableTokenState = emptyWritableTokenState(); - 
given(readableStates.get(TOKENS)).willReturn(readableTokenState); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(readableStates.get(TOKENS)).willReturn(readableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); readableTokenStore = new ReadableTokenStoreImpl(readableStates); writableTokenStore = new WritableTokenStore(writableStates); } @@ -151,28 +149,28 @@ protected void refreshStoresWithCurrentTokenOnlyInReadable() { protected void refreshStoresWithCurrentTokenInWritable() { readableTokenState = readableTokenState(); writableTokenState = writableTokenStateWithOneKey(); - given(readableStates.get(TOKENS)).willReturn(readableTokenState); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(readableStates.get(TOKENS)).willReturn(readableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); readableTokenStore = new ReadableTokenStoreImpl(readableStates); writableTokenStore = new WritableTokenStore(writableStates); } @NonNull - protected MapWritableKVState emptyWritableTokenState() { - return MapWritableKVState.builder(TOKENS).build(); + protected MapWritableKVState emptyWritableTokenState() { + return MapWritableKVState.builder(TOKENS).build(); } @NonNull - protected MapWritableKVState writableTokenStateWithOneKey() { - return MapWritableKVState.builder(TOKENS) - .value(tokenEntityNum, token) + protected MapWritableKVState writableTokenStateWithOneKey() { + return MapWritableKVState.builder(TOKENS) + .value(tokenId, token) .build(); } @NonNull - protected MapReadableKVState readableTokenState() { - return MapReadableKVState.builder(TOKENS) - .value(tokenEntityNum, token) + protected MapReadableKVState readableTokenState() { + return MapReadableKVState.builder(TOKENS) + .value(tokenId, token) .build(); } @@ -193,12 +191,12 @@ protected void givenValidToken( boolean withAdminKey, boolean withSubmitKey) { token = new Token( - tokenId.tokenNum(), + tokenId, tokenName, tokenSymbol, 1000, 1000, - treasury.accountNum(), + AccountID.newBuilder().accountNum(treasury.accountNum()).build(), adminKey, kycKey, freezeKey, @@ -210,7 +208,7 @@ protected void givenValidToken( deleted, TokenType.FUNGIBLE_COMMON, TokenSupplyType.INFINITE, - autoRenewAccountNumber, + BaseCryptoHandler.asAccount(autoRenewAccountNumber), autoRenewSecs, expirationTime, memo, @@ -223,7 +221,7 @@ protected void givenValidToken( protected Token createToken() { return new Token.Builder() - .tokenNumber(tokenId.tokenNum()) + .tokenId(tokenId) .adminKey(adminKey) .supplyKey(supplyKey) .kycKey(kycKey) @@ -231,14 +229,15 @@ protected Token createToken() { .wipeKey(wipeKey) .feeScheduleKey(feeScheduleKey) .pauseKey(pauseKey) - .treasuryAccountNumber(treasury.accountNum()) + .treasuryAccountId( + AccountID.newBuilder().accountNum(treasury.accountNum()).build()) .name(tokenName) .symbol(tokenSymbol) .totalSupply(1000) .decimals(1000) .maxSupply(100000) .autoRenewSecs(autoRenewSecs) - .autoRenewAccountNumber(autoRenewId.accountNum()) + .autoRenewAccountId(autoRenewId) .expiry(expirationTime) .memo(memo) .deleted(false) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/SigReqAdapterUtils.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/SigReqAdapterUtils.java index 855257482f8a..2c0623191f2c 100644 --- 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/SigReqAdapterUtils.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/SigReqAdapterUtils.java @@ -76,6 +76,8 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.AccountApprovalForAllAllowance; import com.hedera.hapi.node.state.token.AccountCryptoAllowance; @@ -85,8 +87,6 @@ import com.hedera.hapi.node.transaction.CustomFee; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.mono.state.merkle.MerkleToken; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.mono.utils.accessors.PlatformTxnAccessor; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.ReadableAccountStoreImpl; @@ -149,9 +149,9 @@ public static WritableTokenStore wellKnownWritableTokenStoreAt() { return new WritableTokenStore(mockWritableStates(Map.of(TOKENS_KEY, wellKnownTokenState()))); } - private static WritableKVState wellKnownTokenState() { + private static WritableKVState wellKnownTokenState() { final var source = sigReqsMockTokenStore(); - final Map destination = new HashMap<>(); + final Map destination = new HashMap<>(); List.of( toPbj(KNOWN_TOKEN_IMMUTABLE), toPbj(KNOWN_TOKEN_NO_SPECIAL_KEYS), @@ -163,32 +163,40 @@ private static WritableKVState wellKnownTokenState() { toPbj(KNOWN_TOKEN_WITH_SUPPLY), toPbj(KNOWN_TOKEN_WITH_WIPE), toPbj(DELETED_TOKEN)) - .forEach(id -> destination.put(EntityNum.fromLong(id.tokenNum()), asToken(source.get(fromPbj(id))))); + .forEach(id -> destination.put(id, asToken(source.get(fromPbj(id))))); return new MapWritableKVState<>("TOKENS", destination); } public static WritableTokenRelationStore wellKnownTokenRelStoreAt() { - final var miscAcctNum = MISC_ACCOUNT.getAccountNum(); - final var destination = new HashMap(); + final var destination = new HashMap(); destination.put( - EntityNumPair.fromLongs(miscAcctNum, KNOWN_TOKEN_IMMUTABLE.getTokenNum()), + EntityIDPair.newBuilder() + .accountId(toPbj(MISC_ACCOUNT)) + .tokenId(toPbj(KNOWN_TOKEN_IMMUTABLE)) + .build(), TokenRelation.newBuilder() - .accountNumber(miscAcctNum) - .tokenNumber(KNOWN_TOKEN_IMMUTABLE.getTokenNum()) + .accountId(toPbj(MISC_ACCOUNT)) + .tokenId(toPbj(KNOWN_TOKEN_IMMUTABLE)) .balance(10) .build()); destination.put( - EntityNumPair.fromLongs(miscAcctNum, KNOWN_TOKEN_NO_SPECIAL_KEYS.getTokenNum()), + EntityIDPair.newBuilder() + .accountId(toPbj(MISC_ACCOUNT)) + .tokenId(toPbj(KNOWN_TOKEN_NO_SPECIAL_KEYS)) + .build(), TokenRelation.newBuilder() - .accountNumber(miscAcctNum) - .tokenNumber(KNOWN_TOKEN_NO_SPECIAL_KEYS.getTokenNum()) + .accountId(toPbj(MISC_ACCOUNT)) + .tokenId(toPbj(KNOWN_TOKEN_NO_SPECIAL_KEYS)) .balance(20) .build()); destination.put( - EntityNumPair.fromLongs(miscAcctNum, KNOWN_TOKEN_WITH_KYC.getTokenNum()), + EntityIDPair.newBuilder() + .accountId(toPbj(MISC_ACCOUNT)) + .tokenId(toPbj(KNOWN_TOKEN_WITH_KYC)) + .build(), TokenRelation.newBuilder() - .accountNumber(miscAcctNum) - .tokenNumber(KNOWN_TOKEN_WITH_KYC.getTokenNum()) + .accountId(toPbj(MISC_ACCOUNT)) + .tokenId(toPbj(KNOWN_TOKEN_WITH_KYC)) .balance(30) .build()); @@ -359,12 
+367,14 @@ private static Token asToken(final MerkleToken token) { customFee.forEach(fee -> pbjFees.add(fromFcCustomFee(fee))); } return new Token( - token.entityNum(), + TokenID.newBuilder().tokenNum(token.entityNum()).build(), token.name(), token.symbol(), token.decimals(), token.totalSupply(), - token.treasuryNum().longValue(), + AccountID.newBuilder() + .accountNum(token.treasuryNum().longValue()) + .build(), !token.adminKey().isEmpty() ? fromGrpcKey(asKeyUnchecked(token.adminKey().get())) : Key.DEFAULT, @@ -392,7 +402,11 @@ private static Token asToken(final MerkleToken token) { token.supplyType() == com.hedera.node.app.service.mono.state.enums.TokenSupplyType.FINITE ? com.hedera.hapi.node.base.TokenSupplyType.FINITE : com.hedera.hapi.node.base.TokenSupplyType.INFINITE, - token.autoRenewAccount() == null ? 0 : token.autoRenewAccount().num(), + token.autoRenewAccount() == null + ? AccountID.DEFAULT + : AccountID.newBuilder() + .accountNum(token.autoRenewAccount().num()) + .build(), token.autoRenewPeriod(), token.expiry(), token.memo(), diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java index f4a04fbc8f93..89f9cb100fe6 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java @@ -218,7 +218,7 @@ void token_getIfUsable_nullToken() { void token_getIfUsable_deletedToken() { given(tokenStore.get(notNull())) .willReturn(Token.newBuilder() - .tokenNumber(TOKEN_ID_45.tokenNum()) + .tokenId(TOKEN_ID_45) .deleted(true) .paused(false) .build()); @@ -232,7 +232,7 @@ void token_getIfUsable_deletedToken() { void token_getIfUsable_pausedToken() { given(tokenStore.get(notNull())) .willReturn(Token.newBuilder() - .tokenNumber(TOKEN_ID_45.tokenNum()) + .tokenId(TOKEN_ID_45) .deleted(false) .paused(true) .build()); @@ -246,7 +246,7 @@ void token_getIfUsable_pausedToken() { void token_getIfUsable_usableToken() { given(tokenStore.get(notNull())) .willReturn(Token.newBuilder() - .tokenNumber(TOKEN_ID_45.tokenNum()) + .tokenId(TOKEN_ID_45) .deleted(false) .paused(false) .build()); @@ -279,8 +279,8 @@ void tokenRel_getIfUsable_notFound() { void tokenRel_getIfUsable_usableTokenRel() { given(tokenRelStore.get(notNull(), notNull())) .willReturn(TokenRelation.newBuilder() - .accountNumber(ACCT_2300.accountNumOrThrow()) - .tokenNumber(TOKEN_ID_45.tokenNum()) + .accountId(ACCT_2300) + .tokenId(TOKEN_ID_45) .deleted(false) .balance(0) .build()); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenRelListCalculatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenRelListCalculatorTest.java index 1dbfe28a692d..1eae6f0ec0a7 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenRelListCalculatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenRelListCalculatorTest.java @@ -17,13 +17,16 @@ package com.hedera.node.app.service.token.impl.test.util; import static 
com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKEN_RELS_KEY; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockStates; import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.TokenRelation; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.impl.ReadableTokenRelationStoreImpl; import com.hedera.node.app.service.token.impl.util.TokenRelListCalculator; @@ -45,21 +48,17 @@ void setup() { subject = new TokenRelListCalculator(localTokenRelsStore()); } - // This null token number will represent a null pointer to a token relation's prev or next token number, i.e. if - // tokenRel.prevToken() == -1L, then there is no previous token in the token rel list. If tokenRel.nextToken() == - // -1L, there is no next token in the token rel list. - private static final long NULL_TOKEN_NUMBER = -1L; - private static final long TOKEN_NUMBER_1 = 1L; - private static final long TOKEN_NUMBER_2 = 2L; - private static final long TOKEN_NUMBER_3 = 3L; - private static final long TOKEN_NUMBER_4 = 4L; - private static final long TOKEN_NUMBER_5 = 5L; + private static final TokenID TOKEN_ID_1 = asToken(1L); + private static final TokenID TOKEN_ID_2 = asToken(2L); + private static final TokenID TOKEN_ID_3 = asToken(3L); + private static final TokenID TOKEN_ID_4 = asToken(4L); + private static final TokenID TOKEN_ID_5 = asToken(5L); private static final AccountID ACCT_2300_ID = AccountID.newBuilder().accountNum(2300L).build(); private static final Account ACCT_2300 = Account.newBuilder() .accountNumber(ACCT_2300_ID.accountNumOrThrow()) - .headTokenNumber(TOKEN_NUMBER_1) + .headTokenNumber(TOKEN_ID_1.tokenNum()) .numberAssociations(5) .build(); @@ -68,34 +67,34 @@ void setup() { // 1).prevToken() = -1, (account 2300, token 1).nextToken() = 2, (account 2300, token 2).prevToken() = 1, (account // 2300, token 2).nextToken() = 3, etc. 
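(Illustrative sketch, not part of the patch.) The comment above describes the doubly linked token-relation list; after this migration the links are expressed with full object IDs, and a null previousToken/nextToken takes the place of the old -1 sentinel. A minimal example of wiring a two-element chain, assuming the TokenRelation builder methods and the test constants used in this hunk:

    // Two-element chain for account 2300: token 1 is the head, token 2 is the tail
    final TokenRelation head = TokenRelation.newBuilder()
            .accountId(ACCT_2300_ID)
            .tokenId(TOKEN_ID_1)
            .previousToken((TokenID) null) // no previous rel -> head of the chain
            .nextToken(TOKEN_ID_2)
            .build();
    final TokenRelation tail = TokenRelation.newBuilder()
            .accountId(ACCT_2300_ID)
            .tokenId(TOKEN_ID_2)
            .previousToken(TOKEN_ID_1)
            .nextToken((TokenID) null) // no next rel -> tail of the chain
            .build();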
private static final TokenRelation LOCAL_TOKEN_REL_1 = TokenRelation.newBuilder() - .accountNumber(ACCT_2300_ID.accountNumOrThrow()) - .tokenNumber(TOKEN_NUMBER_1) - .previousToken(NULL_TOKEN_NUMBER) - .nextToken(TOKEN_NUMBER_2) + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_1) + .previousToken((TokenID) null) + .nextToken(TOKEN_ID_2) .build(); private static final TokenRelation LOCAL_TOKEN_REL_2 = TokenRelation.newBuilder() - .accountNumber(ACCT_2300_ID.accountNumOrThrow()) - .tokenNumber(TOKEN_NUMBER_2) - .previousToken(TOKEN_NUMBER_1) - .nextToken(TOKEN_NUMBER_3) + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_2) + .previousToken(TOKEN_ID_1) + .nextToken(TOKEN_ID_3) .build(); private static final TokenRelation LOCAL_TOKEN_REL_3 = TokenRelation.newBuilder() - .accountNumber(ACCT_2300_ID.accountNumOrThrow()) - .tokenNumber(TOKEN_NUMBER_3) - .previousToken(TOKEN_NUMBER_2) - .nextToken(TOKEN_NUMBER_4) + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_3) + .previousToken(TOKEN_ID_2) + .nextToken(TOKEN_ID_4) .build(); private static final TokenRelation LOCAL_TOKEN_REL_4 = TokenRelation.newBuilder() - .accountNumber(ACCT_2300_ID.accountNumOrThrow()) - .tokenNumber(TOKEN_NUMBER_4) - .previousToken(TOKEN_NUMBER_3) - .nextToken(TOKEN_NUMBER_5) + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_4) + .previousToken(TOKEN_ID_3) + .nextToken(TOKEN_ID_5) .build(); private static final TokenRelation LOCAL_TOKEN_REL_5 = TokenRelation.newBuilder() - .accountNumber(ACCT_2300_ID.accountNumOrThrow()) - .tokenNumber(TOKEN_NUMBER_5) - .previousToken(TOKEN_NUMBER_4) - .nextToken(NULL_TOKEN_NUMBER) + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_5) + .previousToken(TOKEN_ID_4) + .nextToken((TokenID) null) .build(); @SuppressWarnings("DataFlowIssue") @@ -113,15 +112,15 @@ void removeTokenRels_nullArgs() { @Test void removeTokenRels_emptyTokenRels() { final var result = subject.removeTokenRels(ACCT_2300, Collections.emptyList()); - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_NUMBER_1); + Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_ID_1); Assertions.assertThat(result.updatedTokenRelsStillInChain()).isEmpty(); } @Test void removeTokenRels_tokenRelsFromDifferentAccountPresent() { final var tokenRelFromDifferentAccount = TokenRelation.newBuilder() - .accountNumber(2301L) - .tokenNumber(TOKEN_NUMBER_1) + .accountId(asAccount(2301L)) + .tokenId(TOKEN_ID_1) .build(); final var tokenRelsToRemove = List.of(LOCAL_TOKEN_REL_1, LOCAL_TOKEN_REL_2, tokenRelFromDifferentAccount); @@ -134,7 +133,7 @@ void removeTokenRels_removesAllTokenRels() { final var allLocalTokenRels = List.of(LOCAL_TOKEN_REL_1, LOCAL_TOKEN_REL_2, LOCAL_TOKEN_REL_3, LOCAL_TOKEN_REL_4, LOCAL_TOKEN_REL_5); final var result = subject.removeTokenRels(ACCT_2300, allLocalTokenRels); - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(NULL_TOKEN_NUMBER); + Assertions.assertThat(result.updatedHeadTokenId()).isNull(); Assertions.assertThat(result.updatedTokenRelsStillInChain()).isEmpty(); } @@ -142,13 +141,13 @@ void removeTokenRels_removesAllTokenRels() { void removeTokenRels_removesHeadTokenRel() { final var onlyLocalHeadTokenRel = List.of(LOCAL_TOKEN_REL_1); final var result = subject.removeTokenRels(ACCT_2300, onlyLocalHeadTokenRel); - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_NUMBER_2); + Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_ID_2); // Note: we don't need to update LOCAL_TOKEN_REL_3, _4, or _5 because their positions in the token rel list are // unchanged 
Assertions.assertThat(result.updatedTokenRelsStillInChain()) .containsExactly(LOCAL_TOKEN_REL_2 .copyBuilder() - .previousToken(NULL_TOKEN_NUMBER) + .previousToken((TokenID) null) .build()); } @@ -157,22 +156,19 @@ void removeTokenRels_removesEvenTokenRels() { final var evenLocalTokenRels = List.of(LOCAL_TOKEN_REL_2, LOCAL_TOKEN_REL_4); final var result = subject.removeTokenRels(ACCT_2300, evenLocalTokenRels); // The account's head token number shouldn't have changed because it's an odd-numbered token - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(ACCT_2300.headTokenNumber()); + Assertions.assertThat(result.updatedHeadTokenId().tokenNum()).isEqualTo(ACCT_2300.headTokenNumber()); Assertions.assertThat(result.updatedTokenRelsStillInChain()) .containsExactlyInAnyOrder( - LOCAL_TOKEN_REL_1 - .copyBuilder() - .nextToken(TOKEN_NUMBER_3) - .build(), + LOCAL_TOKEN_REL_1.copyBuilder().nextToken(TOKEN_ID_3).build(), LOCAL_TOKEN_REL_3 .copyBuilder() - .previousToken(TOKEN_NUMBER_1) - .nextToken(TOKEN_NUMBER_5) + .previousToken(TOKEN_ID_1) + .nextToken(TOKEN_ID_5) .build(), LOCAL_TOKEN_REL_5 .copyBuilder() - .previousToken(TOKEN_NUMBER_3) - .nextToken(NULL_TOKEN_NUMBER) + .previousToken(TOKEN_ID_3) + .nextToken((TokenID) null) .build()); } @@ -180,18 +176,18 @@ void removeTokenRels_removesEvenTokenRels() { void removeTokenRels_removesOddTokenRels() { final var oddHeadTokenRels = List.of(LOCAL_TOKEN_REL_1, LOCAL_TOKEN_REL_3, LOCAL_TOKEN_REL_5); final var result = subject.removeTokenRels(ACCT_2300, oddHeadTokenRels); - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_NUMBER_2); + Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_ID_2); Assertions.assertThat(result.updatedTokenRelsStillInChain()) .containsExactlyInAnyOrder( LOCAL_TOKEN_REL_2 .copyBuilder() - .previousToken(NULL_TOKEN_NUMBER) - .nextToken(TOKEN_NUMBER_4) + .previousToken((TokenID) null) + .nextToken(TOKEN_ID_4) .build(), LOCAL_TOKEN_REL_4 .copyBuilder() - .previousToken(TOKEN_NUMBER_2) - .nextToken(NULL_TOKEN_NUMBER) + .previousToken(TOKEN_ID_2) + .nextToken((TokenID) null) .build()); } @@ -199,16 +195,13 @@ void removeTokenRels_removesOddTokenRels() { void removeTokenRels_removesConsecutiveTokenRels() { final var consecutiveLocalTokenRels = List.of(LOCAL_TOKEN_REL_2, LOCAL_TOKEN_REL_3, LOCAL_TOKEN_REL_4); final var result = subject.removeTokenRels(ACCT_2300, consecutiveLocalTokenRels); - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(ACCT_2300.headTokenNumber()); + Assertions.assertThat(result.updatedHeadTokenId().tokenNum()).isEqualTo(ACCT_2300.headTokenNumber()); Assertions.assertThat(result.updatedTokenRelsStillInChain()) .containsExactlyInAnyOrder( - LOCAL_TOKEN_REL_1 - .copyBuilder() - .nextToken(TOKEN_NUMBER_5) - .build(), + LOCAL_TOKEN_REL_1.copyBuilder().nextToken(TOKEN_ID_5).build(), LOCAL_TOKEN_REL_5 .copyBuilder() - .previousToken(TOKEN_NUMBER_1) + .previousToken(TOKEN_ID_1) .build()); } @@ -218,17 +211,17 @@ void removeTokenRels_removesConsecutiveAndSeparatedTokenRels() { // rels by token rel 3, which token rel 3 will remain in the list final var localTokenRels = List.of(LOCAL_TOKEN_REL_1, LOCAL_TOKEN_REL_2, LOCAL_TOKEN_REL_4); final var result = subject.removeTokenRels(ACCT_2300, localTokenRels); - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_NUMBER_3); + Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(TOKEN_ID_3); Assertions.assertThat(result.updatedTokenRelsStillInChain()) .containsExactlyInAnyOrder( LOCAL_TOKEN_REL_3 
.copyBuilder() - .previousToken(NULL_TOKEN_NUMBER) - .nextToken(TOKEN_NUMBER_5) + .previousToken((TokenID) null) + .nextToken(TOKEN_ID_5) .build(), LOCAL_TOKEN_REL_5 .copyBuilder() - .previousToken(TOKEN_NUMBER_3) + .previousToken(TOKEN_ID_3) .build()); } @@ -249,22 +242,19 @@ void removeTokenRels_nullAndDuplicateTokenRelsRemoved() { // Results should be identical to the _removesEvenTokenRels case // The account's head token number shouldn't have changed because it's an odd-numbered token - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(ACCT_2300.headTokenNumber()); + Assertions.assertThat(result.updatedHeadTokenId().tokenNum()).isEqualTo(ACCT_2300.headTokenNumber()); Assertions.assertThat(result.updatedTokenRelsStillInChain()) .containsExactlyInAnyOrder( - LOCAL_TOKEN_REL_1 - .copyBuilder() - .nextToken(TOKEN_NUMBER_3) - .build(), + LOCAL_TOKEN_REL_1.copyBuilder().nextToken(TOKEN_ID_3).build(), LOCAL_TOKEN_REL_3 .copyBuilder() - .previousToken(TOKEN_NUMBER_1) - .nextToken(TOKEN_NUMBER_5) + .previousToken(TOKEN_ID_1) + .nextToken(TOKEN_ID_5) .build(), LOCAL_TOKEN_REL_5 .copyBuilder() - .previousToken(TOKEN_NUMBER_3) - .nextToken(NULL_TOKEN_NUMBER) + .previousToken(TOKEN_ID_3) + .nextToken((TokenID) null) .build()); } @@ -272,23 +262,47 @@ void removeTokenRels_nullAndDuplicateTokenRelsRemoved() { void removeTokenRels_selfPointingTokenRel() { final var selfPointingTokenRel = LOCAL_TOKEN_REL_1 .copyBuilder() - .previousToken(TOKEN_NUMBER_1) - .nextToken(TOKEN_NUMBER_1) + .previousToken(TOKEN_ID_1) + .nextToken(TOKEN_ID_1) .build(); final var result = subject.removeTokenRels(ACCT_2300, List.of(selfPointingTokenRel)); // Since the token rel points to itself, the calculation of the account's new head token number should loop // until it maxes out at a safety boundary, at which point we should default to a head token number of -1 - Assertions.assertThat(result.updatedHeadTokenId()).isEqualTo(-1); + Assertions.assertThat(result.updatedHeadTokenId()).isNull(); } private static ReadableTokenRelationStore localTokenRelsStore() { - final long acct2300 = ACCT_2300_ID.accountNumOrThrow(); - final var tokenRels = new HashMap(); - tokenRels.put(EntityNumPair.fromLongs(acct2300, TOKEN_NUMBER_1), LOCAL_TOKEN_REL_1); - tokenRels.put(EntityNumPair.fromLongs(acct2300, TOKEN_NUMBER_2), LOCAL_TOKEN_REL_2); - tokenRels.put(EntityNumPair.fromLongs(acct2300, TOKEN_NUMBER_3), LOCAL_TOKEN_REL_3); - tokenRels.put(EntityNumPair.fromLongs(acct2300, TOKEN_NUMBER_4), LOCAL_TOKEN_REL_4); - tokenRels.put(EntityNumPair.fromLongs(acct2300, TOKEN_NUMBER_5), LOCAL_TOKEN_REL_5); + final var tokenRels = new HashMap(); + tokenRels.put( + EntityIDPair.newBuilder() + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_1) + .build(), + LOCAL_TOKEN_REL_1); + tokenRels.put( + EntityIDPair.newBuilder() + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_2) + .build(), + LOCAL_TOKEN_REL_2); + tokenRels.put( + EntityIDPair.newBuilder() + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_3) + .build(), + LOCAL_TOKEN_REL_3); + tokenRels.put( + EntityIDPair.newBuilder() + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_4) + .build(), + LOCAL_TOKEN_REL_4); + tokenRels.put( + EntityIDPair.newBuilder() + .accountId(ACCT_2300_ID) + .tokenId(TOKEN_ID_5) + .build(), + LOCAL_TOKEN_REL_5); final var wrappedState = new MapReadableKVState<>(TOKEN_RELS_KEY, tokenRels); return new ReadableTokenRelationStoreImpl(mockStates(Map.of(TOKEN_RELS_KEY, wrappedState))); diff --git 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java index 89841f3ff972..ec3c01e6f67e 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java @@ -30,11 +30,10 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.EntityIDPair; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.transaction.CustomFee; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.impl.ReadableTokenRelationStoreImpl; import com.hedera.node.app.service.token.impl.WritableTokenStore; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; @@ -140,7 +139,7 @@ void royaltyFeeForNonFungibleTokenSucceedsOnFeeScheduleUpdate() { @DisplayName("royalty fee for NFTs will fail if the denominating token is missing on fee schedule update") void royaltyFeeFailsWithMissingTokenOnFeeScheduleUpdate() { writableTokenState = emptyWritableTokenState(); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); writableTokenStore = new WritableTokenStore(writableStates); final List feeWithRoyalty = new ArrayList<>(); @@ -164,7 +163,7 @@ void royaltyFeeFailsFungibleDenomOnFeeScheduleUpdate() { .copyBuilder() .fallbackFee(fixedFee.copyBuilder() .denominatingTokenId(TokenID.newBuilder() - .tokenNum(nonFungibleTokenNum.longValue()) + .tokenNum(nonFungibleTokenId.tokenNum()) .build())) .build(); feeWithRoyalty.add(withRoyaltyFee(nftDenom)); @@ -182,7 +181,7 @@ void royaltyFeeFailsFungibleDenomOnFeeScheduleUpdate() { void missingTokenAssociationForRoyaltyFeeFailsOnFeeScheduleUpdate() { refreshWritableStores(); readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); - given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -230,7 +229,7 @@ void fixedFeeIsAllowedForNonFungibleTokenOnFeeScheduleUpdate() { "fails if there is no token relation between token and fee collector in fixed fee on fee schedule update") void failsIfTokenRelationIsMissingInFixedFeeOnFeeScheduleUpdate() { readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); - given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -248,7 +247,7 @@ void failsIfTokenRelationIsMissingInFixedFeeOnFeeScheduleUpdate() { "fails if there is no token relation between token and fee collector in fractional fee on fee schedule update") void 
failsIfTokenRelationIsMissingForFractionalFeeOnFeeScheduleUpdate() { readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); - given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -267,7 +266,7 @@ void validateTokenDenominationForFixedFeeOnFeeScheduleUpdate() { refreshWritableStores(); final var newFee = fixedFee.copyBuilder() .denominatingTokenId(TokenID.newBuilder() - .tokenNum(nonFungibleTokenNum.longValue()) + .tokenNum(nonFungibleTokenId.tokenNum()) .build()) .build(); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -418,7 +417,7 @@ void royaltyFeeForNonFungibleTokenSucceedsOnTokenCreate() { @DisplayName("royalty fee for NFTs will fail if the denominating token is missing") void royaltyFeeFailsWithMissingTokenOnTokenCreate() { writableTokenState = emptyWritableTokenState(); - given(writableStates.get(TOKENS)).willReturn(writableTokenState); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); writableTokenStore = new WritableTokenStore(writableStates); final List feeWithRoyalty = new ArrayList<>(); @@ -441,9 +440,8 @@ void royaltyFeeFailsFungibleDenomOnTokenCreate() { final var nftDenom = royaltyFee .copyBuilder() .fallbackFee(fixedFee.copyBuilder() - .denominatingTokenId(TokenID.newBuilder() - .tokenNum(nonFungibleTokenNum.longValue()) - .build())) + .denominatingTokenId(nonFungibleTokenId) + .build()) .build(); feeWithRoyalty.add(withRoyaltyFee(nftDenom)); assertThatThrownBy(() -> subject.validateForCreation( @@ -460,7 +458,7 @@ void royaltyFeeFailsFungibleDenomOnTokenCreate() { void missingTokenAssociationForRoyaltyFeeFailsOnTokenCreate() { refreshWritableStores(); readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); - given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); assertThatThrownBy(() -> subject.validateForCreation( @@ -507,7 +505,7 @@ void fixedFeeIsAllowedForNonFungibleTokenOnTokenCreate() { @DisplayName("fails if there is no token relation between token and fee collector in fixed fee") void failsIfTokenRelationIsMissingInFixedFeeOnTokenCreate() { readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); - given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); assertThatThrownBy(() -> subject.validateForCreation( @@ -524,11 +522,8 @@ void failsIfTokenRelationIsMissingInFixedFeeOnTokenCreate() { @DisplayName("token denomination should be fungible common for fixed fee") void validateTokenDenominationForFixedFeeOnTokenCreate() { refreshWritableStores(); - final var newFee = fixedFee.copyBuilder() - .denominatingTokenId(TokenID.newBuilder() - .tokenNum(nonFungibleTokenNum.longValue()) - .build()) - .build(); + final var newFee = + fixedFee.copyBuilder().denominatingTokenId(nonFungibleTokenId).build(); assertThatThrownBy(() -> subject.validateForCreation( fungibleToken, readableAccountStore, diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableAccountStore.java 
b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableAccountStore.java index c920594173ff..0c1e3c90a8aa 100644 --- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableAccountStore.java +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableAccountStore.java @@ -19,6 +19,7 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ContractID; import com.hedera.hapi.node.state.token.Account; +import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -46,7 +47,7 @@ public interface ReadableAccountStore { * @return AccountID object if successfully fetched or {@code null} if the account doesn't exist */ @Nullable - AccountID getAccountIDByAlias(@NonNull final String alias); + AccountID getAccountIDByAlias(@NonNull final Bytes alias); /** * Fetches an {@link Account} object from state with the given {@link ContractID}. If the contract account could not diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableNftStore.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableNftStore.java index 2a0e3e2042d8..181429d8c2d7 100644 --- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableNftStore.java +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableNftStore.java @@ -37,7 +37,7 @@ public interface ReadableNftStore { @Nullable default Nft get(@NonNull final TokenID id, final long serialNumber) { final var uniqueTokenId = UniqueTokenId.newBuilder() - .tokenTypeNumber(id.tokenNum()) + .tokenId(id) .serialNumber(serialNumber) .build(); return get(uniqueTokenId); diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenStore.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenStore.java index c1eda609a38d..218e3bbed082 100644 --- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenStore.java +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenStore.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.Key.KeyOneOfType; import com.hedera.hapi.node.base.TokenID; @@ -48,7 +49,7 @@ record TokenMetadata( @Nullable Key pauseKey, @Nullable String symbol, boolean hasRoyaltyWithFallback, - long treasuryNum, + AccountID treasuryAccountId, int decimals) { public boolean hasAdminKey() { return adminKey != null && !adminKey.key().kind().equals(KeyOneOfType.UNSET); diff --git a/hedera-node/infrastructure/docker/containers/local-node/main-network-node/Dockerfile b/hedera-node/infrastructure/docker/containers/local-node/main-network-node/Dockerfile new file mode 100644 index 000000000000..05b2b25b656a --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/main-network-node/Dockerfile @@ -0,0 +1,45 @@ +######################################################################################################################## +# +# Define Global Build Arguments +# +######################################################################################################################## +ARG 
IMAGE_PREFIX="gcr.io/hedera-registry/" +ARG IMAGE_TAG="latest" + +FROM ${IMAGE_PREFIX}network-node-base:${IMAGE_TAG} + +# Define Environment Variables +ENV JAVA_HEAP_MIN="" +ENV JAVA_HEAP_MAX="" +ENV JAVA_OPTS="" + +# Add SDK components +ADD sdk/data/apps/* /opt/hgcapp/services-hedera/HapiApp2.0/data/apps/ +ADD sdk/data/lib/* /opt/hgcapp/services-hedera/HapiApp2.0/data/lib/ + +# Add Diagnostic Utilities +RUN apt-get update && \ + apt-get install --yes net-tools && \ + apt-get autoremove --yes && \ + apt-get autoclean --yes && \ + apt-get clean all --yes && \ + rm -rf /var/lib/{apt,dpkg,cache,log}/ + +# Add the entrypoint script +ADD entrypoint.sh /opt/hgcapp/services-hedera/HapiApp2.0/ + +# Ensure MainNet specific folder & files exist +RUN mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/output" && \ + touch "/opt/hgcapp/services-hedera/HapiApp2.0/stdout.log" + +# Ensure proper file permissions +RUN chmod -R +x /opt/hgcapp/services-hedera/HapiApp2.0/entrypoint.sh && \ + chown -R 2000:2000 /opt/hgcapp/services-hedera/HapiApp2.0 + +# Expose TCP/UDP Port Definitions +EXPOSE 50111/tcp 50211/tcp 50212/tcp + +# Set Final Working Directory, User, and Entrypoint +USER 2000 +WORKDIR "/opt/hgcapp" +ENTRYPOINT ["/opt/hgcapp/services-hedera/HapiApp2.0/entrypoint.sh"] diff --git a/hedera-node/infrastructure/docker/containers/local-node/main-network-node/entrypoint.sh b/hedera-node/infrastructure/docker/containers/local-node/main-network-node/entrypoint.sh new file mode 100644 index 000000000000..16a4b89810fa --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/main-network-node/entrypoint.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash + +######################################################################################################################## +# Copyright 2016-2022 Hedera Hashgraph, LLC # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +######################################################################################################################## + +set -eo pipefail + +SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_PATH}" || exit 64 + +if [[ -z "${JAVA_OPTS}" ]]; then + JAVA_OPTS="" +fi + +JAVA_HEAP_OPTS="" + +if [[ -n "${JAVA_HEAP_MIN}" ]]; then + JAVA_HEAP_OPTS="${JAVA_HEAP_OPTS} -Xms${JAVA_HEAP_MIN}" +fi + +if [[ -n "${JAVA_HEAP_MAX}" ]]; then + JAVA_HEAP_OPTS="${JAVA_HEAP_OPTS} -Xmx${JAVA_HEAP_MAX}" +fi + +if [[ ! 
-d "${SCRIPT_PATH}/output" ]]; then + mkdir -p "${SCRIPT_PATH}/output" +fi + +# Ensure stdout.log exists as a file & not a directory since we are bind mounting +[[ -d "${SCRIPT_PATH}/stdout.log" ]] && rm -rf "${SCRIPT_PATH}/stdout.log" +[[ -f "${SCRIPT_PATH}/stdout.log" ]] || touch "${SCRIPT_PATH}/stdout.log" + +echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> BEGIN USER IDENT >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" +id +echo "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< END USER IDENT <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" +echo + +/usr/bin/env java ${JAVA_HEAP_OPTS} ${JAVA_OPTS} -cp "data/lib/*" com.swirlds.platform.Browser > >(tee stdout.log) 2>&1 +printf "java exit code %s" "${?}\n" >> stdout.log diff --git a/hedera-node/infrastructure/docker/containers/local-node/network-node-base/Dockerfile b/hedera-node/infrastructure/docker/containers/local-node/network-node-base/Dockerfile new file mode 100644 index 000000000000..4e9ac58eb6be --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/network-node-base/Dockerfile @@ -0,0 +1,149 @@ +######################################################################################################################## +# +# Define Global Build Arguments +# +######################################################################################################################## +#ARG UBUNTU_TAG="focal-20220426" +#ARG UBUNTU_TAG="focal-20220531" +#ARG UBUNTU_TAG="focal-20220801" +#ARG UBUNTU_TAG="focal-20220922" +#ARG UBUNTU_TAG="focal-20221019" +#ARG UBUNTU_TAG="focal-20221130" +ARG UBUNTU_TAG="focal-20230605" + +######################################################################################################################## +# +# Setup Builder Image +# +######################################################################################################################## +FROM ubuntu:${UBUNTU_TAG} AS openjdk-builder + +# Define Standard Environment Variables +ENV LC_ALL=C.UTF-8 +ENV DEBIAN_FRONTEND=noninteractive + +# Install basic OS utilities for building +RUN apt-get update && \ + apt-get install --yes tar gzip gnupg2 curl + +########################## +#### Java Setup #### +########################## + +RUN set -eux; \ + ARCH="$(dpkg --print-architecture)"; \ + case "${ARCH}" in \ + aarch64|arm64) \ + ESUM='0084272404b89442871e0a1f112779844090532978ad4d4191b8d03fc6adfade'; \ + BINARY_URL='https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.7%2B7/OpenJDK17U-jdk_aarch64_linux_hotspot_17.0.7_7.tar.gz'; \ + ;; \ + armhf|arm) \ + ESUM='e7a84c3e59704588510d7e6cce1f732f397b54a3b558c521912a18a1b4d0abdc'; \ + BINARY_URL='https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.7%2B7/OpenJDK17U-jdk_arm_linux_hotspot_17.0.7_7.tar.gz'; \ + ;; \ + ppc64el|powerpc:common64) \ + ESUM='8f4366ff1eddb548b1744cd82a1a56ceee60abebbcbad446bfb3ead7ac0f0f85'; \ + BINARY_URL='https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.7%2B7/OpenJDK17U-jdk_ppc64le_linux_hotspot_17.0.7_7.tar.gz'; \ + ;; \ + s390x|s390:64-bit) \ + ESUM='2d75540ae922d0c4162729267a8c741e2414881a468fd2ce4140b4069ba47ca9'; \ + BINARY_URL='https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.7%2B7/OpenJDK17U-jdk_s390x_linux_hotspot_17.0.7_7.tar.gz'; \ + ;; \ + amd64|i386:x86-64) \ + ESUM='e9458b38e97358850902c2936a1bb5f35f6cffc59da9fcd28c63eab8dbbfbc3b'; \ + 
BINARY_URL='https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.7%2B7/OpenJDK17U-jdk_x64_linux_hotspot_17.0.7_7.tar.gz'; \ + ;; \ + *) \ + echo "Unsupported arch: ${ARCH}"; \ + exit 1; \ + ;; \ + esac; \ + curl -LfsSo /tmp/openjdk.tar.gz ${BINARY_URL}; \ + echo "${ESUM} */tmp/openjdk.tar.gz" | sha256sum -c -; \ + mkdir -p /usr/local/java; \ + tar --extract \ + --file /tmp/openjdk.tar.gz \ + --directory "/usr/local/java" \ + --strip-components 1 \ + --no-same-owner \ + ; \ + rm -f /tmp/openjdk.tar.gz /usr/local/java/lib/src.zip; + + +######################################################################################################################## +# +# Build Final Image +# +######################################################################################################################## +FROM ubuntu:${UBUNTU_TAG} AS openjdk-base + +# Define Standard Environment Variables +ENV LC_ALL=C.UTF-8 +ENV DEBIAN_FRONTEND=noninteractive +ENV JAVA_VERSION="jdk-17.0.7+7" +ENV JAVA_HOME="/usr/local/java/" + +# Fetch Validated Java Binaries +COPY --from=openjdk-builder /usr/local/java/ /usr/local/java/ + +# Install Basic OS Requirements +RUN apt-get update && \ + apt-get install --yes --no-install-recommends tar gzip openssl zlib1g libsodium23 sudo && \ + apt-get install --yes --no-install-recommends libnetty-tcnative-jni && \ + apt-get autoremove --yes && \ + apt-get autoclean --yes && \ + apt-get clean all --yes && \ + rm -rf /var/lib/{apt,dpkg,cache,log}/ + +# Install Java Alternatives +RUN update-alternatives --install "/usr/bin/java" "java" "${JAVA_HOME}/bin/java" 1500 && \ + update-alternatives --install "/usr/bin/javac" "javac" "${JAVA_HOME}/bin/javac" 1500 && \ + update-alternatives --install "/usr/bin/javadoc" "javadoc" "${JAVA_HOME}/bin/javadoc" 1500 && \ + update-alternatives --install "/usr/bin/jshell" "jshell" "${JAVA_HOME}/bin/jshell" 1500 && \ + update-alternatives --install "/usr/bin/jstack" "jstack" "${JAVA_HOME}/bin/jstack" 1500 && \ + update-alternatives --install "/usr/bin/jps" "jps" "${JAVA_HOME}/bin/jps" 1500 && \ + update-alternatives --install "/usr/bin/jmap" "jmap" "${JAVA_HOME}/bin/jmap" 1500 + +# Create Application Folders +RUN mkdir -p "/opt/hgcapp" && \ + mkdir -p "/opt/hgcapp/accountBalances" && \ + mkdir -p "/opt/hgcapp/eventsStreams" && \ + mkdir -p "/opt/hgcapp/recordStreams" && \ + mkdir -p "/opt/hgcapp/services-hedera" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/apps" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/backup" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/config" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/diskFs" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/keys" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/lib" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/onboard" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/stats" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/saved" && \ + mkdir -p "/opt/hgcapp/services-hedera/HapiApp2.0/data/upgrade" + +# Configure the standard user account +RUN groupadd --gid 2000 hedera && \ + useradd --no-user-group --create-home --uid 2000 --gid 2000 --shell /bin/bash hedera && \ + chown -R hedera:hedera /opt/hgcapp + +# Configure SUDO support +RUN echo >> /etc/sudoers && \ + echo "%hedera ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers + + +# Define 
Volume Bindpoints +VOLUME "/opt/hgcapp/accountBalances" +VOLUME "/opt/hgcapp/eventsStreams" +VOLUME "/opt/hgcapp/recordStreams" +VOLUME "/opt/hgcapp/services-hedera/HapiApp2.0/data/config" +VOLUME "/opt/hgcapp/services-hedera/HapiApp2.0/data/diskFs" +VOLUME "/opt/hgcapp/services-hedera/HapiApp2.0/data/keys" +VOLUME "/opt/hgcapp/services-hedera/HapiApp2.0/data/onboard" +VOLUME "/opt/hgcapp/services-hedera/HapiApp2.0/data/stats" +VOLUME "/opt/hgcapp/services-hedera/HapiApp2.0/data/saved" +VOLUME "/opt/hgcapp/services-hedera/HapiApp2.0/data/upgrade" + +# Set Final Working Directory and Command/Entrypoint +WORKDIR "/opt/hgcapp" diff --git a/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/Dockerfile b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/Dockerfile new file mode 100644 index 000000000000..3f05736ff696 --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/Dockerfile @@ -0,0 +1,67 @@ +######################################################################################################################## +# +# Define Global Build Arguments +# +######################################################################################################################## +#ARG UBUNTU_TAG="focal-20220426" +#ARG UBUNTU_TAG="focal-20220531" +#ARG UBUNTU_TAG="focal-20220801" +#ARG UBUNTU_TAG="focal-20220922" +#ARG UBUNTU_TAG="focal-20221019" +#ARG UBUNTU_TAG="focal-20221130" +ARG UBUNTU_TAG="focal-20230605" + +ARG HAVEGED_VERSION="1.9.1-6ubuntu1" + +######################################################################################################################## +# +# Setup Builder Image +# +######################################################################################################################## +FROM ubuntu:${UBUNTU_TAG} AS haveged-builder + +# Define Global Argument Refs +ARG HAVEGED_VERSION + +# Import Checksum Files from the Build Context +ADD checksums/* /tmp/checksums/ + +# Install basic OS utilities for building +RUN apt-get update && \ + apt-get install -y tar gzip + +# Download Haveged deb archive +WORKDIR "/tmp/deb" +RUN apt-get update && \ + apt-get download --yes haveged=${HAVEGED_VERSION} libhavege1=${HAVEGED_VERSION} && \ + apt-get clean all + +# Validate Haveged deb archive +RUN sha256sum -c /tmp/checksums/haveged_${HAVEGED_VERSION}_$(dpkg --print-architecture).deb.sha256 && \ + sha256sum -c /tmp/checksums/libhavege1_${HAVEGED_VERSION}_$(dpkg --print-architecture).deb.sha256 + +######################################################################################################################## +# +# Build Final Image +# +######################################################################################################################## +FROM ubuntu:${UBUNTU_TAG} AS network-node-haveged + +# Define Standard Environment Variables +ENV LC_ALL=C.UTF-8 +ENV DEBIAN_FRONTEND=noninteractive + +# Fetch Validated Debian Packages +COPY --from=haveged-builder /tmp/deb/*.deb /tmp/deb/ + +# Install Validated Local Debian Packages +RUN apt-get --allow-unauthenticated install --yes /tmp/deb/*.deb && \ + apt-get autoremove --yes && \ + apt-get autoclean --yes && \ + apt-get clean all --yes && \ + rm -rf /tmp/deb && \ + rm -rf /var/lib/{apt,dpkg,cache,log}/ + +# Set Final Working Directory and Command/Entrypoint +WORKDIR "/" +ENTRYPOINT ["/usr/bin/env", "haveged", "-v", "3", "-F"] diff --git 
a/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/haveged_1.9.1-6ubuntu1_amd64.deb.sha256 b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/haveged_1.9.1-6ubuntu1_amd64.deb.sha256 new file mode 100644 index 000000000000..afee61a883ea --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/haveged_1.9.1-6ubuntu1_amd64.deb.sha256 @@ -0,0 +1 @@ +3526952885ee343a4874a4426fabafd203ee890167ae46a3ca506829250178a7 haveged_1.9.1-6ubuntu1_amd64.deb diff --git a/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/haveged_1.9.1-6ubuntu1_arm64.deb.sha256 b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/haveged_1.9.1-6ubuntu1_arm64.deb.sha256 new file mode 100644 index 000000000000..58e984f1b185 --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/haveged_1.9.1-6ubuntu1_arm64.deb.sha256 @@ -0,0 +1 @@ +27c94952983d14bdda5fbb5650885c7cbf7ccf3292b7dac7adf00efadd240ef3 haveged_1.9.1-6ubuntu1_arm64.deb diff --git a/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/libhavege1_1.9.1-6ubuntu1_amd64.deb.sha256 b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/libhavege1_1.9.1-6ubuntu1_amd64.deb.sha256 new file mode 100644 index 000000000000..f60405a58d34 --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/libhavege1_1.9.1-6ubuntu1_amd64.deb.sha256 @@ -0,0 +1 @@ +1ee05009f3646ded6a620e9a280739aea4a2d1c27a4e98c42ef9ab5598b78b2b libhavege1_1.9.1-6ubuntu1_amd64.deb diff --git a/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/libhavege1_1.9.1-6ubuntu1_arm64.deb.sha256 b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/libhavege1_1.9.1-6ubuntu1_arm64.deb.sha256 new file mode 100644 index 000000000000..ce8601dc265c --- /dev/null +++ b/hedera-node/infrastructure/docker/containers/local-node/network-node-haveged/checksums/libhavege1_1.9.1-6ubuntu1_arm64.deb.sha256 @@ -0,0 +1 @@ +fe474cb31f53deaf111e212e3fd9f9ef7e0f6cda13251b99d635f41a0428ab6b libhavege1_1.9.1-6ubuntu1_arm64.deb diff --git a/hedera-node/test-clients/build.gradle.kts b/hedera-node/test-clients/build.gradle.kts index aabf770f9df6..312f7d860c05 100644 --- a/hedera-node/test-clients/build.gradle.kts +++ b/hedera-node/test-clients/build.gradle.kts @@ -33,6 +33,8 @@ tasks.test { exclude("**/*") } +tasks.itest { systemProperty("itests", System.getProperty("itests")) } + configurations { evaluationDependsOn(":app-hapi-fees") } sourceSets { @@ -82,10 +84,16 @@ dependencies { itestImplementation(project(path)) itestImplementation(project(":hapi")) - itestImplementation(gav("org.apache.commons.lang3")) + itestImplementation(project(":app")) + itestImplementation(project(":config")) itestImplementation(gav("org.junit.jupiter.api")) itestImplementation(gav("org.testcontainers")) itestImplementation(gav("org.testcontainers.junit.jupiter")) + itestImplementation(gav("org.apache.commons.lang3")) + itestImplementation(gav("org.apache.logging.log4j.core")) + itestImplementation(gav("org.apache.logging.log4j.jul")) + itestImplementation(gav("com.swirlds.platform.core")) + itestImplementation(gav("com.github.spotbugs.annotations")) eetImplementation(project(path)) eetImplementation(gav("org.junit.jupiter.api")) diff 
--git a/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java b/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java index 37bd3258d6d8..ab779734d369 100644 --- a/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java +++ b/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java @@ -45,7 +45,7 @@ */ @TestMethodOrder(MethodOrderer.OrderAnnotation.class) @SuppressWarnings("java:S2699") -class AllIntegrationTests extends IntegrationTestBase { +class AllIntegrationTests extends DockerIntegrationTestBase { private static final String TEST_CONTAINER_NODE0_STREAMS = "build/network/itest/records/node_0"; @Tag("integration") diff --git a/hedera-node/test-clients/src/itest/java/IntegrationTestBase.java b/hedera-node/test-clients/src/itest/java/DockerIntegrationTestBase.java similarity index 97% rename from hedera-node/test-clients/src/itest/java/IntegrationTestBase.java rename to hedera-node/test-clients/src/itest/java/DockerIntegrationTestBase.java index 9e76ca71e257..88e34c32b285 100644 --- a/hedera-node/test-clients/src/itest/java/IntegrationTestBase.java +++ b/hedera-node/test-clients/src/itest/java/DockerIntegrationTestBase.java @@ -31,7 +31,7 @@ /** Base class for integration tests */ @Testcontainers -public abstract class IntegrationTestBase extends TestBase { +public abstract class DockerIntegrationTestBase extends TestBase { private static final File WORKSPACE = new File(System.getProperty("networkWorkspaceDir")); private static final DockerImageName IMAGE = DockerImageName.parse(System.getProperty("TAG")); diff --git a/hedera-node/test-clients/src/itest/java/InProcessIntegrationTestBase.java b/hedera-node/test-clients/src/itest/java/InProcessIntegrationTestBase.java new file mode 100644 index 000000000000..11b940a5a810 --- /dev/null +++ b/hedera-node/test-clients/src/itest/java/InProcessIntegrationTestBase.java @@ -0,0 +1,235 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import static java.util.Objects.requireNonNull; + +import com.hedera.node.app.Hedera; +import com.hedera.services.bdd.junit.TestBase; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.props.JutilPropertySource; +import com.swirlds.common.config.BasicConfig; +import com.swirlds.common.config.ConsensusConfig; +import com.swirlds.common.config.OSHealthCheckConfig; +import com.swirlds.common.config.StateConfig; +import com.swirlds.common.config.WiringConfig; +import com.swirlds.common.config.singleton.ConfigurationHolder; +import com.swirlds.common.constructable.ConstructableRegistry; +import com.swirlds.common.context.DefaultPlatformContext; +import com.swirlds.common.crypto.CryptographyHolder; +import com.swirlds.common.crypto.config.CryptoConfig; +import com.swirlds.common.io.config.TemporaryFileConfig; +import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; +import com.swirlds.common.metrics.Metrics; +import com.swirlds.common.metrics.config.MetricsConfig; +import com.swirlds.common.metrics.platform.DefaultMetricsProvider; +import com.swirlds.common.metrics.platform.prometheus.PrometheusConfig; +import com.swirlds.common.system.BasicSoftwareVersion; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.SwirldState; +import com.swirlds.common.system.address.Address; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.config.api.spi.ConfigurationBuilderFactory; +import com.swirlds.fchashmap.config.FCHashMapConfig; +import com.swirlds.jasperdb.config.JasperDbConfig; +import com.swirlds.platform.Crypto; +import com.swirlds.platform.CryptoMetrics; +import com.swirlds.platform.Settings; +import com.swirlds.platform.SwirldsPlatform; +import com.swirlds.platform.config.AddressBookConfig; +import com.swirlds.platform.config.ThreadConfig; +import com.swirlds.platform.crypto.CryptoStatic; +import com.swirlds.platform.dispatch.DispatchConfiguration; +import com.swirlds.platform.gossip.chatter.config.ChatterConfig; +import com.swirlds.platform.gossip.sync.config.SyncConfig; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; +import com.swirlds.platform.state.signed.ReservedSignedState; +import com.swirlds.platform.uptime.UptimeConfig; +import com.swirlds.virtualmap.config.VirtualMapConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.net.InetSocketAddress; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import java.util.ServiceLoader; +import java.util.concurrent.Executors; +import java.util.function.Supplier; +import org.apache.logging.log4j.core.config.ConfigurationSource; +import org.apache.logging.log4j.core.config.Configurator; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.io.TempDir; + +/** + * Base class for integration tests. Not currently used. Once the e2e tests are all passing, we'll switch + * {@link AllIntegrationTests} to use this instead of `DockerIntegrationTestBase` -- most likely. + */ +public abstract class InProcessIntegrationTestBase extends TestBase { + static { + System.setProperty("java.util.logging.manager", "org.apache.logging.log4j.jul.LogManager"); + } + + static Hedera hedera; + + /** Create and start a node, and configure the HapiSpec to use that node. 
*/ + @BeforeAll + static void beforeAll(@TempDir @NonNull final Path tmpDir) throws Exception { + // Setup logging + try (final var log4j2ConfigFile = InProcessIntegrationTestBase.class.getResourceAsStream("/log4j2.xml")) { + final var source = new ConfigurationSource(requireNonNull(log4j2ConfigFile)); + try (final var ignored = Configurator.initialize(null, source)) { + // Nothing to do here. + } + } + + // Setup the constructable registry so the platform can deserialize the state. + final var registry = ConstructableRegistry.getInstance(); + registry.reset(); + registry.registerConstructables("com.swirlds.merklemap"); + registry.registerConstructables("com.swirlds.jasperdb"); + registry.registerConstructables("com.swirlds.fcqueue"); + registry.registerConstructables("com.swirlds.virtualmap"); + registry.registerConstructables("com.swirlds.common.merkle"); + registry.registerConstructables("com.swirlds.common"); + registry.registerConstructables("com.swirlds.merkle"); + registry.registerConstructables("com.swirlds.merkle.tree"); + + // 1. Create a configuration instance with any desired overrides. + final var factory = ServiceLoader.load(ConfigurationBuilderFactory.class); + final var configBuilder = factory.findFirst().orElseThrow().create(); + final var config = configBuilder + .withConfigDataType(BasicConfig.class) + .withConfigDataType(StateConfig.class) + .withConfigDataType(CryptoConfig.class) + .withConfigDataType(TemporaryFileConfig.class) + .withConfigDataType(ReconnectConfig.class) + .withConfigDataType(FCHashMapConfig.class) + .withConfigDataType(JasperDbConfig.class) + .withConfigDataType(ChatterConfig.class) + .withConfigDataType(AddressBookConfig.class) + .withConfigDataType(VirtualMapConfig.class) + .withConfigDataType(ConsensusConfig.class) + .withConfigDataType(ThreadConfig.class) + .withConfigDataType(DispatchConfiguration.class) + .withConfigDataType(MetricsConfig.class) + .withConfigDataType(PrometheusConfig.class) + .withConfigDataType(OSHealthCheckConfig.class) + .withConfigDataType(WiringConfig.class) + .withConfigDataType(SyncConfig.class) + .withConfigDataType(UptimeConfig.class) + .build(); + + ConfigurationHolder.getInstance().setConfiguration(config); + CryptographyHolder.reset(); + + final var port = new InetSocketAddress(0).getPort(); + + System.setProperty("version.services", "0.40.0"); // TBD Get from actual build args... + System.setProperty("version.hapi", "0.40.0"); // TBD Get from actual build args... + System.setProperty( + "hedera.recordStream.logDir", tmpDir.resolve("recordStream").toString()); + System.setProperty("accounts.storeOnDisk", "true"); + System.setProperty("grpc.port", "0"); + System.setProperty("grpc.tlsPort", "0"); + System.setProperty("grpc.workflowsPort", "0"); + System.setProperty("grpc.workflowsTlsPort", "0"); + System.setProperty("hedera.workflows.enabled", "CryptoCreate"); + + // 2. Configure Settings + final var settingsPath = Files.createFile(tmpDir.resolve("settings.txt")); + Settings.getInstance().loadSettings(settingsPath); + + // 3. Create a new Node ID for our node + final var nodeId = new NodeId(0); + + // 4. Set up Metrics + final var metricsProvider = new DefaultMetricsProvider(config); + final Metrics globalMetrics = metricsProvider.createGlobalMetrics(); + CryptoMetrics.registerMetrics(globalMetrics); + + // 5. Create the Platform Context + final var platformContext = new DefaultPlatformContext( + config, metricsProvider.createPlatformMetrics(nodeId), CryptographyHolder.get()); + + // 6. 
Create an Address Book + final var addressBook = new AddressBook(); + addressBook.add(new Address( + nodeId, + "TEST0", + "TEST0", + 1, + true, + new byte[] {127, 0, 0, 1}, + port, + new byte[] {127, 0, 0, 1}, + port, + "0.0.3")); + + // 7. Setup some cryptography + // final var crypto = CryptoSetup.initNodeSecurity(addressBook, config)[0]; + final var keysAndCertsForAllNodes = + CryptoStatic.generateKeysAndCerts(addressBook, Executors.newFixedThreadPool(4)); + final var crypto = new Crypto(keysAndCertsForAllNodes.get(nodeId), Executors.newFixedThreadPool(4)); + CryptographyHolder.get().digestSync(addressBook); + + // 8. Create the Main + hedera = new Hedera(registry); + + // 9. Create a SwirldsPlatform (using nasty reflection. Eek). + final var constructor = SwirldsPlatform.class.getDeclaredConstructors()[0]; + constructor.setAccessible(true); + // @NonNull Supplier genesisStateBuilder, @NonNull ReservedSignedState loadedSignedState, @NonNull + // EmergencyRecoveryManager emergencyRecoveryManager + final SwirldsPlatform platform = (SwirldsPlatform) constructor.newInstance( + platformContext, + crypto, + addressBook, + nodeId, + "Hedera", + "Hedera", + new BasicSoftwareVersion( + Long.MAX_VALUE), // TBD: Use the same as what we are passing to the services version + (Supplier) hedera::newState, + ReservedSignedState.createNullReservation(), + new EmergencyRecoveryManager( + (s, exitCode) -> { + System.out.println("Asked to shutdownGrpcServer because of " + s); + System.exit(exitCode.getExitCode()); + }, + tmpDir.resolve("recovery"))); + + // 10. Init and Start + hedera.init(platform, nodeId); + platform.start(); + + // 11. Initialize the HAPI Spec system + final var defaultProperties = JutilPropertySource.getDefaultInstance(); + HapiSpec.runInCiMode( + String.valueOf(hedera.getGrpcPort()), + defaultProperties.get("default.payer"), + defaultProperties.get("default.node").split("\\.")[2], + defaultProperties.get("tls"), + defaultProperties.get("txn.proto.structure"), + defaultProperties.get("node.selector"), + Map.of("recordStream.path", tmpDir.resolve("recordStream").toString())); + } + + @AfterAll + static void afterAll() { + if (hedera != null) { + hedera.shutdownGrpcServer(); + } + } +} diff --git a/hedera-node/test-clients/src/itest/resources/log4j2.xml b/hedera-node/test-clients/src/itest/resources/log4j2.xml index c17be785ffe3..5a60f03f8e53 100644 --- a/hedera-node/test-clients/src/itest/resources/log4j2.xml +++ b/hedera-node/test-clients/src/itest/resources/log4j2.xml @@ -5,51 +5,22 @@ - - - - %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p %-4L %c{1} - %m{nolookups}%n - - - - - - - - - - - %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p %-4L %c{1} - %m{nolookups}%n - - - - - - - - - - %d{yyyy-MM-dd HH:mm:ss.SSS} %-8sn %-5p %-16marker <%t> %c{1}: %msg{nolookups}%n - - - - - - - - + - - - - + + + + + + + + + + \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/TestBase.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/TestBase.java index ab5935d719cc..0714dc390a2c 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/TestBase.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/TestBase.java @@ -196,6 +196,17 @@ private static Stream suffixContextualizedSpecsFromConcurrent( * @return */ protected final DynamicContainer extractSpecsFromSuite(final Supplier suiteSupplier) { + return extractSpecsFromSuite(suiteSupplier, ".*"); + } + + /** + * Utility that creates a DynamicTest for 
each HapiApiSpec in the given suite. + * + * @param suiteSupplier + * @return + */ + protected final DynamicContainer extractSpecsFromSuite( + final Supplier suiteSupplier, final String filter) { final var suite = suiteSupplier.get(); final var tests = suite.getSpecsInSuiteWithOverrides().stream() .map(s -> dynamicTest(s.getName(), () -> { @@ -211,7 +222,8 @@ protected final DynamicContainer extractSpecsFromSuite(final Supplier + s.getName() + "}: " + s.getCause()); - })); + })) + .filter(t -> t.getDisplayName().matches(filter)); return dynamicContainer(suite.getClass().getSimpleName(), tests); } diff --git a/hedera-platform/build.gradle.kts b/hedera-platform/build.gradle.kts index 54b90bcf18e4..35bf651dbb35 100644 --- a/hedera-platform/build.gradle.kts +++ b/hedera-platform/build.gradle.kts @@ -90,6 +90,7 @@ dependencies.constraints { api(gav("org.apache.commons.lang3", "3.12.0")) api(gav("org.apache.logging.log4j", log4jVersion)) api(gav("org.apache.logging.log4j.core", log4jVersion)) + api(gav("org.apache.logging.log4j.jul", log4jVersion)) api(gav("org.assertj.core", "3.23.1")) api(gav("org.bouncycastle.pkix", bouncycastleVersion)) api(gav("org.bouncycastle.provider", bouncycastleVersion)) diff --git a/platform-sdk/platform-apps/demos/HelloSwirldDemo/src/main/java/com/swirlds/demo/hello/HelloSwirldDemoMain.java b/platform-sdk/platform-apps/demos/HelloSwirldDemo/src/main/java/com/swirlds/demo/hello/HelloSwirldDemoMain.java index 98fe42a80861..8ef9ec7721ee 100644 --- a/platform-sdk/platform-apps/demos/HelloSwirldDemo/src/main/java/com/swirlds/demo/hello/HelloSwirldDemoMain.java +++ b/platform-sdk/platform-apps/demos/HelloSwirldDemo/src/main/java/com/swirlds/demo/hello/HelloSwirldDemoMain.java @@ -52,7 +52,7 @@ public class HelloSwirldDemoMain implements SwirldMain { /** the platform running this app */ public SwirldsPlatform platform; /** ID number for this member */ - public long selfId; + public NodeId selfId; /** a console window for text output */ public Console console; /** sleep this many milliseconds after each sync */ @@ -78,7 +78,7 @@ public void init(final Platform platform, final NodeId id) { platform.getNotificationEngine().register(PlatformStatusChangeListener.class, this::platformStatusChange); this.platform = (SwirldsPlatform) platform; - this.selfId = id.id(); + this.selfId = id; this.console = createConsole(platform, true); // create the window, make it visible GuiPlatformAccessor.getInstance().setAbout(platform.getSelfId(), "Hello Swirld v. 
1.0\n"); } diff --git a/platform-sdk/platform-apps/demos/StatsDemo/src/main/java/com/swirlds/demo/stats/StatsDemoMain.java b/platform-sdk/platform-apps/demos/StatsDemo/src/main/java/com/swirlds/demo/stats/StatsDemoMain.java index 422821add58a..b51bf61e2e45 100644 --- a/platform-sdk/platform-apps/demos/StatsDemo/src/main/java/com/swirlds/demo/stats/StatsDemoMain.java +++ b/platform-sdk/platform-apps/demos/StatsDemo/src/main/java/com/swirlds/demo/stats/StatsDemoMain.java @@ -75,7 +75,7 @@ public class StatsDemoMain implements SwirldMain { /** path and filename of the .csv file to write to */ private String path; /** ID number for this member */ - private long selfId; + private NodeId selfId; /** the app is run by this */ private Platform platform; /** a console window for text output */ @@ -209,7 +209,7 @@ private synchronized void generateTransactions() { public void init(final Platform platform, final NodeId id) { this.platform = platform; - selfId = id.id(); + selfId = id; // parse the config.txt parameters, and allow optional _ as in 1_000_000 final String[] parameters = ParameterProvider.getInstance().getParameters(); headless = (parameters[0].equals("1")); diff --git a/platform-sdk/platform-apps/tests/AddressBookTestingTool/src/main/java/com/swirlds/demo/addressbook/AddressBookTestingToolState.java b/platform-sdk/platform-apps/tests/AddressBookTestingTool/src/main/java/com/swirlds/demo/addressbook/AddressBookTestingToolState.java index 82f28a24af91..8e5827c7da45 100644 --- a/platform-sdk/platform-apps/tests/AddressBookTestingTool/src/main/java/com/swirlds/demo/addressbook/AddressBookTestingToolState.java +++ b/platform-sdk/platform-apps/tests/AddressBookTestingTool/src/main/java/com/swirlds/demo/addressbook/AddressBookTestingToolState.java @@ -42,6 +42,7 @@ import com.swirlds.common.merkle.MerkleLeaf; import com.swirlds.common.merkle.impl.PartialMerkleLeaf; import com.swirlds.common.system.InitTrigger; +import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Platform; import com.swirlds.common.system.Round; import com.swirlds.common.system.SoftwareVersion; @@ -92,7 +93,7 @@ private static class ClassVersion { private static final long CLASS_ID = 0xf052378c7364ef47L; - private long selfId; + private NodeId selfId; /** false until the test scenario has been validated, true afterwards. 
*/ private final AtomicBoolean validationPerformed = new AtomicBoolean(false); @@ -153,7 +154,7 @@ public void init( logger.info(STARTUP.getMarker(), "init called in State."); throwIfImmutable(); - this.selfId = platform.getSelfId().id(); + this.selfId = platform.getSelfId(); } /** diff --git a/platform-sdk/platform-apps/tests/MigrationTestingTool/src/main/java/com/swirlds/demo/migration/MigrationTestingToolState.java b/platform-sdk/platform-apps/tests/MigrationTestingTool/src/main/java/com/swirlds/demo/migration/MigrationTestingToolState.java index 307b0437a2db..2321943d1907 100644 --- a/platform-sdk/platform-apps/tests/MigrationTestingTool/src/main/java/com/swirlds/demo/migration/MigrationTestingToolState.java +++ b/platform-sdk/platform-apps/tests/MigrationTestingTool/src/main/java/com/swirlds/demo/migration/MigrationTestingToolState.java @@ -24,6 +24,7 @@ import com.swirlds.common.merkle.MerkleNode; import com.swirlds.common.merkle.impl.PartialNaryMerkleInternal; import com.swirlds.common.system.InitTrigger; +import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Platform; import com.swirlds.common.system.Round; import com.swirlds.common.system.SoftwareVersion; @@ -92,7 +93,7 @@ private static class ChildIndices { public static final int CHILD_COUNT = 3; } - public long selfId; + public NodeId selfId; public MigrationTestingToolState() { super(ChildIndices.CHILD_COUNT); @@ -219,7 +220,7 @@ private void genesisInit(final Platform platform) { .preferDiskBasedIndexes(false); setVirtualMap(new VirtualMap<>("virtualMap", jasperDbBuilder)); - selfId = platform.getSelfId().id(); + selfId = platform.getSelfId(); } /** @@ -240,7 +241,7 @@ public void init( if (virtualMap != null) { logger.info(MARKER, "VirtualMap initialized with {} values", virtualMap.size()); } - selfId = platform.getSelfId().id(); + selfId = platform.getSelfId(); if (trigger == InitTrigger.GENESIS) { logger.error(MARKER, "InitTrigger was {} when expecting RESTART or RECONNECT", trigger); diff --git a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/build.gradle.kts b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/build.gradle.kts index 30d0fabd2f76..e8ad4e1b6863 100644 --- a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/build.gradle.kts +++ b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/build.gradle.kts @@ -15,11 +15,12 @@ */ plugins { - id("com.swirlds.platform.conventions") - id("com.swirlds.platform.application") + id("com.swirlds.platform.conventions") + id("com.swirlds.platform.application") } dependencies { - // Individual Dependencies - implementation(project(":swirlds-platform-core")) + // Individual Dependencies + implementation(project(":swirlds-platform-core")) + compileOnly(libs.spotbugs.annotations) } diff --git a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolMain.java b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolMain.java index 441f745fdee6..09eeef41b882 100644 --- a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolMain.java +++ b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolMain.java @@ -87,10 +87,6 @@ public class StatsSigningTestingToolMain implements SwirldMain { * the size of the signed transaction pool */ private int 
signedTransPoolSize = 1024; - /** - * ID number for this member - */ - private long selfId; /** * the app is run by this */ @@ -152,9 +148,7 @@ public StatsSigningTestingToolMain() { @Override public void init(final Platform platform, final NodeId id) { - this.platform = platform; - selfId = id.id(); // parse the config.txt parameters, and allow optional _ as in 1_000_000 final String[] parameters = ParameterProvider.getInstance().getParameters(); headless = (parameters[0].equals("1")); @@ -183,7 +177,7 @@ public void init(final Platform platform, final NodeId id) { + " such as the number of transactions per second."); transactionPool = new TransactionPool( - platform.getSelfId().id(), + platform.getSelfId(), signedTransPoolSize, bytesPerTrans, true, @@ -282,7 +276,7 @@ private synchronized void generateTransactions() { @Override public SwirldState newState() { - return new StatsSigningTestingToolState(selfId, () -> transactionPool); + return new StatsSigningTestingToolState(() -> transactionPool); } /** diff --git a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolState.java b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolState.java index c8689bf8f12f..449b3ad6288d 100644 --- a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolState.java +++ b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/StatsSigningTestingToolState.java @@ -43,8 +43,10 @@ import com.swirlds.common.system.events.Event; import com.swirlds.common.system.transaction.ConsensusTransaction; import com.swirlds.common.system.transaction.Transaction; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.util.Arrays; +import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.function.Supplier; @@ -71,8 +73,6 @@ public class StatsSigningTestingToolState extends PartialMerkleLeaf implements S /** A running sum of transaction contents */ private long runningSum = 0; - private final long selfId; - /** if true, artificially take {@link #HANDLE_MICROS} to handle each consensus transaction */ private static final boolean SYNTHETIC_HANDLE_TIME = false; @@ -80,17 +80,15 @@ public class StatsSigningTestingToolState extends PartialMerkleLeaf implements S private static final int HANDLE_MICROS = 100; public StatsSigningTestingToolState() { - this(0L, () -> null); + this(() -> null); } - public StatsSigningTestingToolState(final long selfId, final Supplier transactionPoolSupplier) { - this.selfId = selfId; - this.transactionPoolSupplier = transactionPoolSupplier; + public StatsSigningTestingToolState(@NonNull final Supplier transactionPoolSupplier) { + this.transactionPoolSupplier = Objects.requireNonNull(transactionPoolSupplier); } - private StatsSigningTestingToolState(final long selfId, final StatsSigningTestingToolState sourceState) { + private StatsSigningTestingToolState(@NonNull final StatsSigningTestingToolState sourceState) { super(sourceState); - this.selfId = selfId; this.transactionPoolSupplier = sourceState.transactionPoolSupplier; setImmutable(false); sourceState.setImmutable(true); @@ -102,7 +100,7 @@ private StatsSigningTestingToolState(final long selfId, final StatsSigningTestin @Override public synchronized StatsSigningTestingToolState copy() 
{ throwIfImmutable(); - return new StatsSigningTestingToolState(selfId, this); + return new StatsSigningTestingToolState(this); } /** @@ -181,8 +179,7 @@ private boolean validateSignature(final TransactionSignature signature, final Tr } catch (final InterruptedException e) { logger.info( TESTING_EXCEPTIONS_ACCEPTABLE_RECONNECT.getMarker(), - "handleTransaction Interrupted [ nodeId = {} ]. This should happen only during a reconnect", - selfId); + "handleTransaction Interrupted. This should happen only during a reconnect"); Thread.currentThread().interrupt(); } catch (final ExecutionException e) { logger.error( diff --git a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/TransactionPool.java b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/TransactionPool.java index a67797e14b7c..ace355769021 100644 --- a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/TransactionPool.java +++ b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/com/swirlds/demo/stats/signing/TransactionPool.java @@ -20,16 +20,20 @@ import static com.swirlds.logging.LogMarker.ADV_CRYPTO_SYSTEM; import static com.swirlds.logging.LogMarker.EXCEPTION; +import com.swirlds.common.system.NodeId; import com.swirlds.common.system.transaction.Transaction; import com.swirlds.demo.stats.signing.algorithms.ExtendedSignature; import com.swirlds.demo.stats.signing.algorithms.SigningAlgorithm; import com.swirlds.demo.stats.signing.algorithms.X25519SigningAlgorithm; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.security.SignatureException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Random; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -68,7 +72,7 @@ final class TransactionPool { private final Map activeAlgorithms; - private final long nodeId; + private final NodeId nodeId; /** * Constructs a TransactionPool instance with a fixed pool size, fixed transaction size, and whether to pre-sign @@ -85,11 +89,11 @@ final class TransactionPool { * if the {@code poolSize} or the {@code transactionSize} parameters are less than one (1) */ TransactionPool( - final long nodeId, + @NonNull final NodeId nodeId, final int poolSize, final int transactionSize, final boolean signed, - final SigningAlgorithm... enabledAlgorithms) { + @Nullable final SigningAlgorithm... 
enabledAlgorithms) { if (poolSize < 1) { throw new IllegalArgumentException("poolSize"); } @@ -101,7 +105,7 @@ final class TransactionPool { this.random = new Random(); this.signed = signed; - this.nodeId = nodeId; + this.nodeId = Objects.requireNonNull(nodeId); this.transactionSize = transactionSize; this.transactions = new byte[poolSize][]; this.enabledAlgorithms = new ArrayList<>(); @@ -143,7 +147,7 @@ private void init() { final List algorithms = new ArrayList<>(this.activeAlgorithms.values()); int lastChosenAlg = 0; - long transactionId = nodeId * transactions.length; + long transactionId = nodeId.id() * transactions.length; for (int i = 0; i < transactions.length; i++) { final byte[] data = new byte[transactionSize]; diff --git a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/module-info.java b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/module-info.java index c5c9d0887d5f..3a8b294f6e7b 100644 --- a/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/module-info.java +++ b/platform-sdk/platform-apps/tests/StatsSigningTestingTool/src/main/java/module-info.java @@ -6,4 +6,5 @@ requires org.bouncycastle.provider; requires org.bouncycastle.pkix; requires org.apache.logging.log4j; + requires static com.github.spotbugs.annotations; } diff --git a/platform-sdk/sdk/settings.txt b/platform-sdk/sdk/settings.txt index 2454ed2a3cc2..244ff781f829 100644 --- a/platform-sdk/sdk/settings.txt +++ b/platform-sdk/sdk/settings.txt @@ -11,51 +11,54 @@ # Gossip / Networking # ############################# -chatter.useChatter, false -doUpnp, false -maxOutgoingSyncs, 1 -sync.syncAsProtocolEnabled, true -sync.syncProtocolPermitCount, 17 -useLoopbackIp, false -freezeSecondsAfterStartup, 0 +chatter.useChatter, false +socket.doUpnp, false +socket.useLoopbackIp, false +sync.maxOutgoingSyncs, 1 +sync.syncAsProtocolEnabled, true +sync.syncProtocolPermitCount, 2 ############################# - # Preconsensus Event Stream # + # Event Creation # ############################# -event.preconsensus.enableReplay, false -event.preconsensus.enableStorage, false +freezeSecondsAfterStartup, 0 +event.creation.maxCreationRate, 5 +event.creation.useTipsetAlgorithm, true ############################# - # Consensus Event Stream # + # Event Stream # ############################# -enableEventStreaming, false +event.enableEventStreaming, false +event.preconsensus.enableReplay, false +event.preconsensus.enableStorage, false + ############################# # Reconnect # ############################# -reconnect.active, true -state.saveReconnectStateToDisk, false +reconnect.active, true +state.saveReconnectStateToDisk, false ############################# # Metrics # ############################# -csvFileName, PlatformTesting -showInternalStats, true +metrics.csvFileName, PlatformTesting +showInternalStats, true ############################# # State # ############################# -checkSignedStateFromDisk, true -state.saveStatePeriod, 0 + +state.checkSignedStateFromDisk, true +state.saveStatePeriod, 0 ############################# # Misc. 
# ############################# -loadKeysFromPfxFiles, false - +loadKeysFromPfxFiles, false \ No newline at end of file diff --git a/platform-sdk/swirlds-cli/color-logs.py b/platform-sdk/swirlds-cli/color-logs.py index 96600d53e582..c9aa0c2700fe 100755 --- a/platform-sdk/swirlds-cli/color-logs.py +++ b/platform-sdk/swirlds-cli/color-logs.py @@ -130,6 +130,8 @@ def format(line): format_class_name(class_name) + \ remainder + "\n" -for line in stdin: - print(format(line), end='') - +try: + for line in stdin: + print(format(line), end='') +except KeyboardInterrupt: + pass diff --git a/platform-sdk/swirlds-cli/pcli.sh b/platform-sdk/swirlds-cli/pcli.sh index dddba2204af5..34b930bbb2de 100755 --- a/platform-sdk/swirlds-cli/pcli.sh +++ b/platform-sdk/swirlds-cli/pcli.sh @@ -65,14 +65,25 @@ SCRIPT_PATH="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 || exit ; pwd -P )" # The entrypoint into the platform CLI (i.e. where the main() method is) MAIN_CLASS_NAME='com.swirlds.cli.PlatformCli' +PYTHON_INSTALLED=true +python3 --version >/dev/null 2>&1 || PYTHON_INSTALLED=false + COLOR=false COLOR_LOGS_PATH="${SCRIPT_PATH}/color-logs.py" if [[ -e "$COLOR_LOGS_PATH" ]]; then - # The color-logs.py script exists, enable color. - COLOR=true + if [[ "$PYTHON_INSTALLED" = true ]]; then + # The color-logs.py script exists and can be executed, enable color. + COLOR=true + fi +fi - # If python is not installed then we can't use the script. - python3 --version >/dev/null 2>&1 || COLOR=false +SQUELCH_SPAM=false +SQUELCH_SPAM_PATH="${SCRIPT_PATH}/squelch-spam.py" +if [[ -e "$SQUELCH_SPAM_PATH" ]]; then + if [[ "$PYTHON_INSTALLED" = true ]]; then + # The squelch-spam.py script exists and can be executed, enable squelching. + SQUELCH_SPAM=true + fi fi # Iterate over arguments and strip out the classpath arguments and JVM arguments. @@ -159,8 +170,20 @@ if [[ "$JVM_CLASSPATH" = '' ]]; then exit 1 fi +run () { + java "${JVM_ARGS[@]}" -cp "${JVM_CLASSPATH}" $MAIN_CLASS_NAME "${PROGRAM_ARGS[@]}" +} + if [[ "$COLOR" = true ]]; then - java "${JVM_ARGS[@]}" -cp "${JVM_CLASSPATH}" $MAIN_CLASS_NAME "${PROGRAM_ARGS[@]}" | $COLOR_LOGS_PATH + if [[ "$SQUELCH_SPAM" = true ]]; then + run 2>&1 | $SQUELCH_SPAM_PATH | $COLOR_LOGS_PATH + else + run | $COLOR_LOGS_PATH + fi else - java "${JVM_ARGS[@]}" -cp "${JVM_CLASSPATH}" $MAIN_CLASS_NAME "${PROGRAM_ARGS[@]}" + if [[ "$SQUELCH_SPAM" = true ]]; then + run 2>&1 | $SQUELCH_SPAM_PATH + else + run + fi fi diff --git a/platform-sdk/swirlds-cli/squelch-spam.py b/platform-sdk/swirlds-cli/squelch-spam.py new file mode 100755 index 000000000000..ad665e668aa6 --- /dev/null +++ b/platform-sdk/swirlds-cli/squelch-spam.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 + +# +# Copyright 2016-2022 Hedera Hashgraph, LLC +# +# This software is the confidential and proprietary information of +# Hedera Hashgraph, LLC. ("Confidential Information"). You shall not +# disclose such Confidential Information and shall use it only in +# accordance with the terms of the license agreement you entered into +# with Hedera Hashgraph. +# +# HEDERA HASHGRAPH MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF +# THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +# TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +# PARTICULAR PURPOSE, OR NON-INFRINGEMENT. HEDERA HASHGRAPH SHALL NOT BE LIABLE FOR +# ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR +# DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES. 
+# + +# This program reads from standard in repeats to standard out, removing spammy log lines. +# An example of this log is shown below. It is a known issue without a fix. It is harmless, but annoying. + +# 2023-06-27 11:22:58.383 java[64808:1357823] Bad JNI lookup accessibilityHitTest +# 2023-06-27 11:22:58.386 java[64808:1357823] ( +# 0 libawt_lwawt.dylib 0x0000000150ecdccd -[CommonComponentAccessibility accessibilityHitTest:] + 173 +# 1 libawt_lwawt.dylib 0x0000000150e86d83 -[AWTView accessibilityHitTest:] + 179 +# 2 AppKit 0x00007ff80ca55673 -[NSWindow(NSWindowAccessibility) accessibilityHitTest:] + 302 +# 3 AppKit 0x00007ff80c524b3a -[NSApplication(NSApplicationAccessibility) accessibilityHitTest:] + 285 +# 4 AppKit 0x00007ff80c4e5abb CopyElementAtPosition + 138 +# 5 HIServices 0x00007ff80e917efa _AXXMIGCopyElementAtPosition + 399 +# 6 HIServices 0x00007ff80e93a128 _XCopyElementAtPosition + 355 +# 7 HIServices 0x00007ff80e8f74c9 mshMIGPerform + 182 +# 8 CoreFoundation 0x00007ff80910ae2d __CFRUNLOOP_IS_CALLING_OUT_TO_A_SOURCE1_PERFORM_FUNCTION__ + 41 +# 9 CoreFoundation 0x00007ff80910ad70 __CFRunLoopDoSource1 + 536 +# 10 CoreFoundation 0x00007ff809109a00 __CFRunLoopRun + 2698 +# 11 CoreFoundation 0x00007ff80910891c CFRunLoopRunSpecific + 560 +# 12 HIToolbox 0x00007ff81304cdad RunCurrentEventLoopInMode + 292 +# 13 HIToolbox 0x00007ff81304cbbe ReceiveNextEventCommon + 657 +# 14 HIToolbox 0x00007ff81304c918 _BlockUntilNextEventMatchingListInModeWithFilter + 64 +# 15 AppKit 0x00007ff80c0fc5d0 _DPSNextEvent + 858 +# 16 AppKit 0x00007ff80c0fb47a -[NSApplication(NSEvent) _nextEventMatchingEventMask:untilDate:inMode:dequeue:] + 1214 +# 17 libosxapp.dylib 0x000000015013d4fa -[NSApplicationAWT nextEventMatchingMask:untilDate:inMode:dequeue:] + 122 +# 18 AppKit 0x00007ff80c0edae8 -[NSApplication run] + 586 +# 19 libosxapp.dylib 0x000000015013d2c9 +[NSApplicationAWT runAWTLoopWithApp:] + 185 +# 20 libawt_lwawt.dylib 0x0000000150eee908 +[AWTStarter starter:headless:] + 520 +# 21 libosxapp.dylib 0x000000015013f00f +[ThreadUtilities invokeBlockCopy:] + 15 +# 22 Foundation 0x00007ff809f14793 __NSThreadPerformPerform + 177 +# 23 CoreFoundation 0x00007ff80910a906 __CFRUNLOOP_IS_CALLING_OUT_TO_A_SOURCE0_PERFORM_FUNCTION__ + 17 +# 24 CoreFoundation 0x00007ff80910a8a9 __CFRunLoopDoSource0 + 157 +# 25 CoreFoundation 0x00007ff80910a686 __CFRunLoopDoSources0 + 217 +# 26 CoreFoundation 0x00007ff80910930a __CFRunLoopRun + 916 +# 27 CoreFoundation 0x00007ff80910891c CFRunLoopRunSpecific + 560 +# 28 libjli.dylib 0x000000010a866e82 CreateExecutionEnvironment + 402 +# 29 libjli.dylib 0x000000010a8626a8 JLI_Launch + 1496 +# 30 java 0x00000001022d1c0e main + 414 +# 31 dyld 0x00000002026ae41f start + 1903 +# ) +# Exception in thread "AppKit Thread" java.lang.NoSuchMethodError: accessibilityHitTest + + +from sys import stdin +from sys import stdout + +start_of_spam = "] Bad JNI lookup accessibilityHitTest" +end_of_spam = "Exception in thread \"AppKit Thread\" java.lang.NoSuchMethodError: accessibilityHitTest" + +in_spam = False +next_line_is_spam = False + +def line_is_spam(line): + global in_spam + global next_line_is_spam + + if not next_line_is_spam: + in_spam = False + + if not in_spam: + if start_of_spam in line: + in_spam = True + next_line_is_spam = True + else: + if end_of_spam in line: + # current line is spam, but the next one isn't (as far as we know at this moment) + next_line_is_spam = False + + return in_spam + +try: + for line in stdin: + if line_is_spam(line): + continue + print(line, end='') + 
stdout.flush() +except KeyboardInterrupt: + pass \ No newline at end of file diff --git a/platform-sdk/swirlds-common/build.gradle.kts b/platform-sdk/swirlds-common/build.gradle.kts index 92cd275e782d..5b998ffdd775 100644 --- a/platform-sdk/swirlds-common/build.gradle.kts +++ b/platform-sdk/swirlds-common/build.gradle.kts @@ -15,37 +15,38 @@ */ plugins { - id("com.swirlds.platform.conventions") - id("com.swirlds.platform.library") - id("com.swirlds.platform.maven-publish") - id("org.gradle.java-test-fixtures") + id("com.swirlds.platform.conventions") + id("com.swirlds.platform.library") + id("com.swirlds.platform.maven-publish") + id("org.gradle.java-test-fixtures") } dependencies { - // Individual Dependencies - api(project(":swirlds-base")) - api(project(":swirlds-logging")) - api(project(":swirlds-config-api")) - implementation(project(":swirlds-base")) - implementation(libs.classgraph) - implementation(libs.commons.codec) - implementation(libs.prometheus.httpserver) { - exclude("io.prometheus", "simpleclient_tracer_otel") - exclude("io.prometheus", "simpleclient_tracer_otel_agent") - } - compileOnly(libs.spotbugs.annotations) + // Individual Dependencies + api(project(":swirlds-base")) + api(project(":swirlds-logging")) + api(project(":swirlds-config-api")) + implementation(project(":swirlds-base")) + implementation(libs.classgraph) + implementation(libs.commons.codec) + implementation(libs.prometheus.httpserver) { + exclude("io.prometheus", "simpleclient_tracer_otel") + exclude("io.prometheus", "simpleclient_tracer_otel_agent") + } + compileOnly(libs.spotbugs.annotations) - // Bundle Dependencies - api(libs.bundles.cryptography.core) - runtimeOnly(libs.bundles.cryptography.runtime) - implementation(libs.bundles.logging.impl) - compileOnly(libs.spotbugs.annotations) + // Bundle Dependencies + api(libs.bundles.cryptography.core) + runtimeOnly(libs.bundles.cryptography.runtime) + implementation(libs.bundles.logging.impl) + compileOnly(libs.spotbugs.annotations) - // Test Dependencies - testImplementation(testLibs.bundles.junit) - testImplementation(testLibs.bundles.mocking) - testImplementation(testLibs.bundles.utils) - testImplementation(project(":swirlds-config-impl")) - testImplementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) - testImplementation(project(":swirlds-unit-tests:common:swirlds-common-test")) + // Test Dependencies + testCompileOnly(libs.spotbugs.annotations) + testImplementation(testLibs.bundles.junit) + testImplementation(testLibs.bundles.mocking) + testImplementation(testLibs.bundles.utils) + testImplementation(project(":swirlds-config-impl")) + testImplementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) + testImplementation(project(":swirlds-unit-tests:common:swirlds-common-test")) } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java index eab93af87210..2416ff48d73f 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java @@ -27,6 +27,11 @@ * Basic configuration data record. This record contains all general config properties that can not be defined for a * specific subsystem. The record is based on the definition of config data objects as described in {@link ConfigData}. * + *

+ * Do not add new settings to this record unless you have a very good reason. New settings should go + * into config records with a prefix defined by a {@link ConfigData @ConfigData("prefix")} tag. Adding + * settings to this record pollutes the top level namespace. + * * @param configsUsedFilename * the name of the file that contains the list of config files used to create this config * @param verifyEventSigs @@ -125,6 +130,12 @@ * @param genesisFreezeTime * If this node starts from genesis, this value is used as the freeze time. This feature is deprecated and * planned for removal in a future platform version. + * @param deadlockCheckPeriod + * check for deadlocks every this many milliseconds (-1 for never) + * @param statsBufferSize + * number of bins to store for the history (in StatsBuffer etc.) + * @param statsRecentSeconds + * number of seconds covered by "recent" history (in StatsBuffer etc.) */ @ConfigData public record BasicConfig( @@ -164,7 +175,10 @@ public record BasicConfig( @ConfigProperty(defaultValue = "log4j2.xml") Path logPath, @ConfigProperty(defaultValue = "60s") Duration hangingThreadDuration, @ConfigProperty(defaultValue = "data/saved") String emergencyRecoveryFileLoadDir, - @ConfigProperty(defaultValue = "0") long genesisFreezeTime) { + @ConfigProperty(defaultValue = "0") long genesisFreezeTime, + @ConfigProperty(defaultValue = "1000") int deadlockCheckPeriod, + @ConfigProperty(defaultValue = "100") int statsBufferSize, + @ConfigProperty(defaultValue = "63") double statsRecentSeconds) { /** * @return Absolute path to the emergency recovery file load directory. diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/FunctionalSerialize.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/FunctionalSerialize.java index b6c38363cfef..2e3e598cc9aa 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/FunctionalSerialize.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/FunctionalSerialize.java @@ -17,6 +17,7 @@ package com.swirlds.common.io; import com.swirlds.common.io.streams.SerializableDataOutputStream; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; @FunctionalInterface @@ -30,5 +31,5 @@ public interface FunctionalSerialize { * @throws IOException * Thrown in case of an IO exception. */ - void serialize(SerializableDataOutputStream out) throws IOException; + void serialize(@NonNull SerializableDataOutputStream out) throws IOException; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/SelfSerializable.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/SelfSerializable.java index 9f4ea9269aa8..6fcaf77d040e 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/SelfSerializable.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/SelfSerializable.java @@ -18,6 +18,7 @@ import com.swirlds.common.io.streams.SerializableDataInputStream; import com.swirlds.common.io.streams.SerializableDataOutputStream; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; /** @@ -37,5 +38,5 @@ public interface SelfSerializable extends SerializableDet, FunctionalSerialize { * @throws IOException * Thrown in case of an IO exception. 
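Illustration of the prefixed-config pattern recommended in the BasicConfig javadoc above (a sketch only: the "example" prefix, record name, and property are hypothetical; the @ConfigData/@ConfigProperty annotations and the getConfigData lookup follow usage already present in this change set, assuming the annotations live in com.swirlds.config.api alongside Configuration):

import com.swirlds.config.api.ConfigData;
import com.swirlds.config.api.ConfigProperty;

// Properties of this record are read under the "example" prefix (e.g. "example.checkPeriodMs" in settings.txt)
// rather than adding another un-prefixed property to the top-level namespace owned by BasicConfig.
@ConfigData("example")
public record ExampleConfig(@ConfigProperty(defaultValue = "1000") int checkPeriodMs) {}

// Retrieved the same way other config records are retrieved in this diff:
// final ExampleConfig exampleConfig = configuration.getConfigData(ExampleConfig.class);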
*/ - void deserialize(SerializableDataInputStream in, int version) throws IOException; + void deserialize(@NonNull SerializableDataInputStream in, int version) throws IOException; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java index d7975cc4a9e3..3455cf453280 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java @@ -170,7 +170,7 @@ private long statusUpdate(final long previousPair, final long statusChange) { if ((statusChange == WORK_START && !isIdle(currentStatus)) || (statusChange == WORK_END && isIdle(currentStatus))) { // this means that the metric has not been updated correctly, we will not change the value - if (logLimiter.request()) { + if (logLimiter.requestAndTrigger()) { log.error( LogMarker.EXCEPTION.getMarker(), "BusyTime metric has been updated incorrectly. " diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultMetricsProvider.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultMetricsProvider.java index 334cc3d2aeb0..da7bcbf5130c 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultMetricsProvider.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultMetricsProvider.java @@ -31,9 +31,11 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.utility.CommonUtils; import com.swirlds.config.api.Configuration; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executors; @@ -59,14 +61,15 @@ public class DefaultMetricsProvider implements MetricsProvider, Lifecycle { private final PrometheusEndpoint prometheusEndpoint; private final SnapshotService snapshotService; private final MetricsConfig metricsConfig; + private final Configuration configuration; private LifecyclePhase lifecyclePhase = LifecyclePhase.NOT_STARTED; /** * Constructor of {@code DefaultMetricsProvider} */ - public DefaultMetricsProvider(final Configuration configuration) { - CommonUtils.throwArgNull(configuration, "configuration"); + public DefaultMetricsProvider(@NonNull final Configuration configuration) { + this.configuration = Objects.requireNonNull(configuration, "configuration is null"); metricsConfig = configuration.getConfigData(MetricsConfig.class); final PrometheusConfig prometheusConfig = configuration.getConfigData(PrometheusConfig.class); @@ -128,7 +131,7 @@ public Metrics createPlatformMetrics(final NodeId nodeId) { // setup LegacyCsvWriter if (StringUtils.isNotBlank(metricsConfig.csvFileName())) { - final LegacyCsvWriter legacyCsvWriter = new LegacyCsvWriter(nodeId, folderPath, metricsConfig); + final LegacyCsvWriter legacyCsvWriter = new LegacyCsvWriter(nodeId, folderPath, configuration); snapshotService.subscribe(legacyCsvWriter::handleSnapshots); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/LegacyCsvWriter.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/LegacyCsvWriter.java index 
91a837c4d7b4..04a8bda54de1 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/LegacyCsvWriter.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/LegacyCsvWriter.java @@ -16,7 +16,6 @@ package com.swirlds.common.metrics.platform; -import static com.swirlds.common.utility.CommonUtils.throwArgNull; import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STARTUP; import static java.lang.Double.isInfinite; @@ -26,13 +25,15 @@ import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING; import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; -import com.swirlds.common.internal.SettingsCommon; +import com.swirlds.common.config.BasicConfig; import com.swirlds.common.metrics.Metric; import com.swirlds.common.metrics.Metric.ValueType; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.config.MetricsConfig; import com.swirlds.common.system.NodeId; import com.swirlds.common.utility.ThresholdLimitingHandler; +import com.swirlds.config.api.Configuration; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Files; @@ -85,6 +86,7 @@ public class LegacyCsvWriter { // path and filename of the .csv file to write to private final Path csvFilePath; private final MetricsConfig metricsConfig; + private final BasicConfig basicConfig; private final Map, Integer> indexLookup = new HashMap<>(); private final List cellCount = new ArrayList<>(); @@ -98,16 +100,20 @@ public class LegacyCsvWriter { * Constructor of a {@code LegacyCsvWriter} * * @param selfId - * {@link NodeId} of the platform for which the CSV-file is written + * {@link NodeId} of the platform for which the CSV-file is written * @param folderPath - * {@link Path} to the folder where the file should be stored - * @param metricsConfig - * the metrics configuration + * {@link Path} to the folder where the file should be stored + * @param configuration + * the configuration */ - public LegacyCsvWriter(final NodeId selfId, final Path folderPath, final MetricsConfig metricsConfig) { - this.selfId = throwArgNull(selfId, "selfId"); - throwArgNull(folderPath, "folderPath"); - this.metricsConfig = throwArgNull(metricsConfig, "metricsConfig"); + public LegacyCsvWriter( + @NonNull final NodeId selfId, @NonNull final Path folderPath, @NonNull final Configuration configuration) { + Objects.requireNonNull(folderPath, "folderPath is null"); + Objects.requireNonNull(configuration, "configuration is null"); + + this.selfId = Objects.requireNonNull(selfId, "selfId is null"); + metricsConfig = configuration.getConfigData(MetricsConfig.class); + basicConfig = configuration.getConfigData(BasicConfig.class); final String fileName = String.format("%s%d.csv", metricsConfig.csvFileName(), selfId.id()); this.csvFilePath = folderPath.resolve(fileName); @@ -127,7 +133,7 @@ public Path getCsvFilePath() { * it is not possible to add new metrics. 
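For reference, a minimal sketch of wiring the writer against the new Configuration-based constructor above (the node id and output folder are placeholders, and the configuration / snapshotService instances are assumed to be in scope, as at the call site in DefaultMetricsProvider):

// Hypothetical wiring, mirroring DefaultMetricsProvider.createPlatformMetrics():
final NodeId selfId = new NodeId(0);
final Path folderPath = Path.of("data", "stats"); // placeholder output folder
final LegacyCsvWriter csvWriter = new LegacyCsvWriter(selfId, folderPath, configuration);
snapshotService.subscribe(csvWriter::handleSnapshots); // CSV rows are written from metric snapshots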
* * @param snapshots - * {@link List} of {@link Snapshot}s of all known metrics at this point in time + * {@link List} of {@link Snapshot}s of all known metrics at this point in time */ private void init(final Collection snapshots) { logger.info( @@ -139,7 +145,7 @@ private void init(final Collection snapshots) { // eventually filter out internal metrics final List filteredMetrics = snapshots.stream() .map(Snapshot::metric) - .filter(LegacyCsvWriter::shouldWrite) + .filter(this::shouldWrite) .toList(); indexLookup.clear(); @@ -153,7 +159,6 @@ private void init(final Collection snapshots) { try { // create parent folder, if it does not exist ensureFolderExists(); - if (metricsConfig.csvAppend() && Files.exists(csvFilePath)) { // make sure last line of previous test was ended, and a blank line is inserted between tests. Files.writeString(csvFilePath, "\n\n", StandardOpenOption.APPEND); @@ -185,13 +190,16 @@ private void init(final Collection snapshots) { } } - @SuppressWarnings("deprecation") - private static boolean showAllEntries(final Metric metric) { - return SettingsCommon.verboseStatistics && !metric.getCategory().contains(EXCLUDE_CATEGORY); + private boolean showAllEntries(@NonNull final Metric metric) { + Objects.requireNonNull(metric, "metric is null"); + return basicConfig.verboseStatistics() && !metric.getCategory().contains(EXCLUDE_CATEGORY); } // Add two rows, one with all categories, the other with all names - private static void addHeaderRows(final ContentBuilder builder, final List metrics) { + private void addHeaderRows(@NonNull final ContentBuilder builder, @NonNull final List metrics) { + Objects.requireNonNull(builder, "builder is null"); + Objects.requireNonNull(metrics, "metrics is null"); + final List categories = new ArrayList<>(); final List names = new ArrayList<>(); for (final Metric metric : metrics) { @@ -315,9 +323,9 @@ private String format(final Metric metric, final Object value) { } // Returns false, if a Metric is internal and internal metrics should not be written - @SuppressWarnings("deprecation") - private static boolean shouldWrite(final Metric metric) { - return SettingsCommon.showInternalStats || !metric.getCategory().equals(Metrics.INTERNAL_CATEGORY); + private boolean shouldWrite(@NonNull final Metric metric) { + Objects.requireNonNull(metric, "metric is null"); + return basicConfig.showInternalStats() || !metric.getCategory().equals(Metrics.INTERNAL_CATEGORY); } // Ensure that the parent folder specified by {@link #csvFilePath} exists and if not create it recursively. diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/ConcurrentSequenceMap.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/ConcurrentSequenceMap.java index 972148d2a4aa..cd78d1ba5903 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/ConcurrentSequenceMap.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/ConcurrentSequenceMap.java @@ -19,6 +19,7 @@ import com.swirlds.common.sequence.map.internal.AbstractSequenceMap; import com.swirlds.common.threading.locks.IndexLock; import com.swirlds.common.threading.locks.Locks; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; @@ -29,10 +30,8 @@ /** * A thread safe implementation of {@link SequenceMap}. 
* - * @param - * the type of the key - * @param - * the type of the value + * @param the type of the key + * @param the type of the value */ public class ConcurrentSequenceMap extends AbstractSequenceMap { @@ -41,33 +40,53 @@ public class ConcurrentSequenceMap extends AbstractSequenceMap { private final int parallelism; /** - * When inserting data into this data structure, it is critical that data is not inserted after the - * data's sequence number has been purged (as this would lead to a memory leak). Whenever new data is inserted, - * acquire a lock that prevents concurrent purging of that sequence number. + * When inserting data into this data structure, it is critical that data is not inserted after the data's sequence + * number has been purged (as this would lead to a memory leak). Whenever new data is inserted, acquire a lock that + * prevents concurrent purging of that sequence number. */ private final IndexLock lock; private final Lock windowLock = new ReentrantLock(); + /** + * Construct a thread safe {@link SequenceMap} that does not permit expansion. + * + * @param firstSequenceNumberInWindow the lowest allowed sequence number + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param getSequenceNumberFromKey a method that extracts the sequence number from a key + */ + public ConcurrentSequenceMap( + final long firstSequenceNumberInWindow, + final int sequenceNumberCapacity, + @NonNull final ToLongFunction getSequenceNumberFromKey) { + + this(firstSequenceNumberInWindow, sequenceNumberCapacity, false, getSequenceNumberFromKey); + } + /** * Construct a thread safe {@link SequenceMap}. * - * @param firstSequenceNumberInWindow - * the lowest allowed sequence number - * @param sequenceNumberCapacity - * the number of sequence numbers permitted to exist in this data structure. E.g. if - * the lowest allowed sequence number is 100 and the capacity is 10, then values with - * a sequence number between 100 and 109 (inclusive) will be allowed, and any value - * with a sequence number outside that range will be rejected. - * @param getSequenceNumberFromKey - * a method that extracts the sequence number from a key + * @param firstSequenceNumberInWindow the lowest allowed sequence number + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param allowExpansion if true, then instead of rejecting elements with a sequence number higher than + * the allowed by the current capacity, increase capacity and then insert the + * element. Does not expand if the sequence number is too low to fit in the + * current capacity. 
+ * @param getSequenceNumberFromKey a method that extracts the sequence number from a key */ public ConcurrentSequenceMap( final long firstSequenceNumberInWindow, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromKey) { + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromKey) { - super(firstSequenceNumberInWindow, sequenceNumberCapacity, getSequenceNumberFromKey); + super(firstSequenceNumberInWindow, sequenceNumberCapacity, allowExpansion, getSequenceNumberFromKey); parallelism = Math.min(MAX_PARALLELISM, sequenceNumberCapacity); lock = Locks.createIndexLock(parallelism); diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/StandardSequenceMap.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/StandardSequenceMap.java index f992b7b0c012..bd29d3d00f1a 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/StandardSequenceMap.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/StandardSequenceMap.java @@ -24,34 +24,52 @@ /** * A lock free implementation of {@link SequenceMap}. * - * @param - * the type of the key - * @param - * the type of the value + * @param the type of the key + * @param the type of the value */ public class StandardSequenceMap extends AbstractSequenceMap { private long firstSequenceNumberInWindow; + /** + * Construct a {@link SequenceMap} that does not permit expansion. + * + * @param firstSequenceNumberInWindow the lowest allowed sequence number + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param getSequenceNumberFromKey a method that extracts the sequence number from a key + */ + public StandardSequenceMap( + final long firstSequenceNumberInWindow, + final int sequenceNumberCapacity, + final ToLongFunction getSequenceNumberFromKey) { + + this(firstSequenceNumberInWindow, sequenceNumberCapacity, false, getSequenceNumberFromKey); + } + /** * Construct a {@link SequenceMap}. * - * @param firstSequenceNumberInWindow - * the lowest allowed sequence number - * @param sequenceNumberCapacity - * the number of sequence numbers permitted to exist in this data structure. E.g. if - * the lowest allowed sequence number is 100 and the capacity is 10, then values with - * a sequence number between 100 and 109 (inclusive) will be allowed, and any value - * with a sequence number outside that range will be rejected. - * @param getSequenceNumberFromKey - * a method that extracts the sequence number from a key + * @param firstSequenceNumberInWindow the lowest allowed sequence number + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param allowExpansion if true, then instead of rejecting elements with a sequence number higher than + * the allowed by the current capacity, increase capacity and then insert the + * element. 
Does not expand if the sequence number is too low to fit in the + * current capacity. + * @param getSequenceNumberFromKey a method that extracts the sequence number from a key */ public StandardSequenceMap( final long firstSequenceNumberInWindow, final int sequenceNumberCapacity, + final boolean allowExpansion, final ToLongFunction getSequenceNumberFromKey) { - super(firstSequenceNumberInWindow, sequenceNumberCapacity, getSequenceNumberFromKey); + super(firstSequenceNumberInWindow, sequenceNumberCapacity, allowExpansion, getSequenceNumberFromKey); this.firstSequenceNumberInWindow = firstSequenceNumberInWindow; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/internal/AbstractSequenceMap.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/internal/AbstractSequenceMap.java index 18f71a500980..83c0de896531 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/internal/AbstractSequenceMap.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/map/internal/AbstractSequenceMap.java @@ -17,11 +17,12 @@ package com.swirlds.common.sequence.map.internal; import com.swirlds.common.sequence.map.SequenceMap; -import java.lang.reflect.Array; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.AbstractMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.ToLongFunction; @@ -29,13 +30,17 @@ /** * Boilerplate implementation for {@link SequenceMap}. * - * @param - * the type of the key - * @param - * the type of the value + * @param the type of the key + * @param the type of the value */ public abstract class AbstractSequenceMap implements SequenceMap { + /** + * The maximum supported size of an array is JVM dependant, but it's usually a little smaller than the maximum + * integer size. Various sources suggest this is a generally safe value to use. + */ + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; + /** * The data in the map. */ @@ -44,9 +49,13 @@ public abstract class AbstractSequenceMap implements SequenceMap { /** * Keys for each sequence number currently being stored. */ - private final SequenceKeySet[] keySets; + private SequenceKeySet[] keySets; - private final int sequenceNumberCapacity; + /** + * The current capacity for sequence numbers. Equal to the maximum sequence number minus the minimum sequence + * number. If {@link #allowExpansion} is true, then this value can be increased. If not, it is fixed. + */ + private int sequenceNumberCapacity; /** * A method that gets the sequence number associated with a given key. @@ -58,29 +67,40 @@ public abstract class AbstractSequenceMap implements SequenceMap { */ private final long initialFirstSequenceNumber; + /** + * If true, expand when we get a high sequence number that does not fit. If false, reject the element. + */ + private final boolean allowExpansion; + /** * Construct an abstract sequence map. * - * @param initialFirstSequenceNumber - * the lowest allowed sequence number when this object is constructed, - * or after it is cleared - * @param sequenceNumberCapacity - * the number of sequence numbers permitted to exist in this data structure. E.g. 
if - * the lowest allowed sequence number is 100 and the capacity is 10, then values with - * a sequence number between 100 and 109 (inclusive) will be allowed, and any value - * with a sequence number outside that range will be rejected. - * @param getSequenceNumberFromKey - * a method that extracts the sequence number from a 1key + * @param initialFirstSequenceNumber the lowest allowed sequence number when this object is constructed, or after it + * is cleared + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param allowExpansion if true, then instead of rejecting elements with a sequence number higher than + * the allowed by the current capacity, increase capacity and then insert the + * element. Does not expand if the sequence number is too low to fit in the + * current capacity. + * @param getSequenceNumberFromKey a method that extracts the sequence number from a key */ @SuppressWarnings("unchecked") protected AbstractSequenceMap( final long initialFirstSequenceNumber, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromKey) { + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromKey) { + this.initialFirstSequenceNumber = initialFirstSequenceNumber; this.sequenceNumberCapacity = sequenceNumberCapacity; + this.allowExpansion = allowExpansion; + this.getSequenceNumberFromKey = Objects.requireNonNull(getSequenceNumberFromKey); + data = buildDataMap(); - keySets = (SequenceKeySet[]) Array.newInstance(SequenceKeySet.class, sequenceNumberCapacity); + keySets = new SequenceKeySet[sequenceNumberCapacity]; for (long sequenceNumber = initialFirstSequenceNumber; sequenceNumber < initialFirstSequenceNumber + sequenceNumberCapacity; @@ -88,16 +108,12 @@ protected AbstractSequenceMap( keySets[getSequenceKeyIndex(sequenceNumber)] = new SequenceKeySet<>(sequenceNumber); } - - this.initialFirstSequenceNumber = initialFirstSequenceNumber; - this.getSequenceNumberFromKey = getSequenceNumberFromKey; } /** * Set the smallest allowed sequence number in the current window. * - * @param firstSequenceNumberInWindow - * the new first sequence number in the window + * @param firstSequenceNumberInWindow the new first sequence number in the window */ protected abstract void setFirstSequenceNumberInWindow(final long firstSequenceNumberInWindow); @@ -109,16 +125,16 @@ protected AbstractSequenceMap( protected abstract Map buildDataMap(); /** - * Acquire a lock on window management. Held during purge/expand calls, and during clear. - * No-op for implementations that do not require thread safety. + * Acquire a lock on window management. Held during purge/expand calls, and during clear. No-op for implementations + * that do not require thread safety. */ protected void windowLock() { // Override if thread safety is required } /** - * Release a lock on window management. Held during purge/expand calls, and during clear. - * No-op for implementations that do not require thread safety. + * Release a lock on window management. Held during purge/expand calls, and during clear. No-op for implementations + * that do not require thread safety. 
*/ protected void windowUnlock() { // Override if thread safety is required @@ -127,8 +143,7 @@ protected void windowUnlock() { /** * Acquire an exclusive lock on a sequence number. No-op for implementations that do not require thread safety. * - * @param sequenceNumber - * the sequence number to lock + * @param sequenceNumber the sequence number to lock */ protected void lockSequenceNumber(final long sequenceNumber) { // Override if thread safety is required @@ -137,8 +152,7 @@ protected void lockSequenceNumber(final long sequenceNumber) { /** * Release an exclusive lock on a sequence number. No-op for implementations that do not require thread safety. * - * @param sequenceNumber - * the sequence number to unlock + * @param sequenceNumber the sequence number to unlock */ protected void unlockSequenceNumber(final long sequenceNumber) { // Override if thread safety is required @@ -159,9 +173,9 @@ protected void fullUnlock() { } /** - * When the window is shifted significantly, it can be more efficient to grab all locks at the start, as - * compared to locking on each sequence number one at a time. This method describes the size of the shift - * required to trigger a full lock. + * When the window is shifted significantly, it can be more efficient to grab all locks at the start, as compared to + * locking on each sequence number one at a time. This method describes the size of the shift required to trigger a + * full lock. * * @return shifts greater or equal to this in size will trigger a full lock */ @@ -194,8 +208,7 @@ public int getSequenceNumberCapacity() { /** * Get the sequence number from a key. * - * @param key - * the key + * @param key the key * @return the associated sequence number */ private long getSequenceNumber(final K key) { @@ -203,10 +216,9 @@ private long getSequenceNumber(final K key) { } /** - * Get the key set index for a given sequence number. + * Get the key set index for a given sequence number and current capacity. * - * @param sequenceNumber - * the sequence number in question + * @param sequenceNumber the sequence number in question * @return the index of the sequence number */ private int getSequenceKeyIndex(final long sequenceNumber) { @@ -219,8 +231,7 @@ private int getSequenceKeyIndex(final long sequenceNumber) { /** * Get the sequence key set for a given sequence number. 
* - * @param sequenceNumber - * the sequence number to fetch + * @param sequenceNumber the sequence number to fetch * @return the key set for the sequence number */ private SequenceKeySet getSequenceKeySet(final long sequenceNumber) { @@ -250,13 +261,18 @@ public V computeIfAbsent(final K key, final Function map @Override public boolean putIfAbsent(final K key, final V value) { final long sequenceNumber = getSequenceNumber(key); - final SequenceKeySet keys = getSequenceKeySet(sequenceNumber); + SequenceKeySet keys = getSequenceKeySet(sequenceNumber); lockSequenceNumber(sequenceNumber); try { if (keys.getSequenceNumber() != sequenceNumber) { // the key is outside the allowed window - return false; + if (allowExpansion && sequenceNumber > getFirstSequenceNumberInWindow()) { + expandCapacity(sequenceNumber); + keys = getSequenceKeySet(sequenceNumber); + } else { + return false; + } } if (data.containsKey(key)) { // don't re-insert if the value is already present @@ -278,13 +294,18 @@ public boolean putIfAbsent(final K key, final V value) { @Override public V put(final K key, final V value) { final long sequenceNumber = getSequenceNumber(key); - final SequenceKeySet keys = getSequenceKeySet(sequenceNumber); + SequenceKeySet keys = getSequenceKeySet(sequenceNumber); lockSequenceNumber(sequenceNumber); try { if (keys.getSequenceNumber() != sequenceNumber) { // the key is outside the allowed window - return null; + if (allowExpansion && sequenceNumber > getFirstSequenceNumberInWindow()) { + expandCapacity(sequenceNumber); + keys = getSequenceKeySet(sequenceNumber); + } else { + return null; + } } final V previousValue = data.put(key, value); @@ -338,7 +359,7 @@ public void shiftWindow(final long firstSequenceNumberInWindow, final BiConsumer final long previousFirstSequenceNumber = getFirstSequenceNumberInWindow(); if (firstSequenceNumberInWindow < previousFirstSequenceNumber) { throw new IllegalStateException( - "Window can only be shifted towards larger value. " + "Current lowest sequence number = " + "Window can only be shifted towards larger value. Current lowest sequence number = " + previousFirstSequenceNumber + ", requested lowest sequence number = " + firstSequenceNumberInWindow); } @@ -385,8 +406,8 @@ public void shiftWindow(final long firstSequenceNumberInWindow, final BiConsumer } /** - * When the window is shifted, it causes some key sets in the circular buffer increase their sequence number. - * This method computes the new sequence number that the key set is required to have. + * When the window is shifted, it causes some key sets in the circular buffer increase their sequence number. This + * method computes the new sequence number that the key set is required to have. */ private long mapToNewSequenceNumber(final long firstSequenceNumberInWindow, final long sequenceNumberToReplace) { // the distance between the new first sequence number in the window @@ -406,6 +427,52 @@ private long mapToNewSequenceNumber(final long firstSequenceNumberInWindow, fina return sequenceNumberToReplace + increase; } + /** + * Expand the capacity so that we fit the required sequence number. 
+ * + * @param requiredSequenceNumber the sequence number that we need to fit into this structure + */ + @SuppressWarnings("unchecked") + private void expandCapacity(final long requiredSequenceNumber) { + windowLock(); + fullLock(); + try { + final int oldCapacity = keySets.length; + final long firstSequenceNumber = getFirstSequenceNumberInWindow(); + final long minimumCapacity = requiredSequenceNumber - firstSequenceNumber; + if (minimumCapacity < 0) { + // this can only happen if we get integer overflow + throw new IllegalStateException("Cannot expand capacity beyond " + MAX_ARRAY_SIZE); + } else if (minimumCapacity < MAX_ARRAY_SIZE / 2 - 1) { + sequenceNumberCapacity = (int) (minimumCapacity * 2); + } else if (minimumCapacity <= MAX_ARRAY_SIZE) { + sequenceNumberCapacity = MAX_ARRAY_SIZE; + } else { + throw new IllegalStateException("Cannot expand capacity beyond " + MAX_ARRAY_SIZE); + } + + final SequenceKeySet[] oldKeySets = keySets; + keySets = new SequenceKeySet[sequenceNumberCapacity]; + + // Copy the old key sets into the new array + for (int oldIndex = 0; oldIndex < oldCapacity; oldIndex++) { + final long sequenceNumber = oldKeySets[oldIndex].getSequenceNumber(); + final int newIndex = getSequenceKeyIndex(sequenceNumber); + keySets[newIndex] = oldKeySets[oldIndex]; + } + + // Create new key sets for the added capacity + for (int offset = 0; offset < (sequenceNumberCapacity - oldCapacity); offset++) { + final long newSequenceNumber = firstSequenceNumber + oldCapacity + offset; + final int index = getSequenceKeyIndex(newSequenceNumber); + keySets[index] = new SequenceKeySet<>(newSequenceNumber); + } + } finally { + fullUnlock(); + windowUnlock(); + } + } + /** * {@inheritDoc} */ diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/ConcurrentSequenceSet.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/ConcurrentSequenceSet.java index 4d24917b42d1..84bc14a1aa97 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/ConcurrentSequenceSet.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/ConcurrentSequenceSet.java @@ -19,46 +19,72 @@ import com.swirlds.common.sequence.map.ConcurrentSequenceMap; import com.swirlds.common.sequence.map.SequenceMap; import com.swirlds.common.sequence.set.internal.AbstractSequenceSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; import java.util.function.ToLongFunction; /** * A thread safe {@link SequenceSet}. * - * @param - * the type of the element contained within this set + * @param the type of the element contained within this set */ public class ConcurrentSequenceSet extends AbstractSequenceSet { + /** + * Create a new thread safe {@link SequenceSet} that does not permit expansion. + * + * @param lowestAllowedSequenceNumber the initial lowest permitted sequence in the set + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. 
+ * @param getSequenceNumberFromEntry given an entry, extract the sequence number + */ + public ConcurrentSequenceSet( + final long lowestAllowedSequenceNumber, + final int sequenceNumberCapacity, + @NonNull final ToLongFunction getSequenceNumberFromEntry) { + + this(lowestAllowedSequenceNumber, sequenceNumberCapacity, false, getSequenceNumberFromEntry); + } + /** * Create a new thread safe {@link SequenceSet}. * - * @param lowestAllowedSequenceNumber - * the initial lowest permitted sequence in the set - * @param sequenceNumberCapacity - * the number of sequence numbers permitted to exist in this data structure. E.g. if - * the lowest allowed sequence number is 100 and the capacity is 10, then values with - * a sequence number between 100 and 109 (inclusive) will be allowed, and any value - * with a sequence number outside that range will be rejected. - * @param getSequenceNumberFromEntry - * given an entry, extract the sequence number + * @param lowestAllowedSequenceNumber the initial lowest permitted sequence in the set + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param allowExpansion if true, then instead of rejecting elements with a sequence number higher than + * the allowed by the current capacity, increase capacity and then insert the + * element. Does not expand if the sequence number is too low to fit in the + * current capacity. + * @param getSequenceNumberFromEntry given an entry, extract the sequence number */ public ConcurrentSequenceSet( final long lowestAllowedSequenceNumber, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromEntry) { + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromEntry) { - super(lowestAllowedSequenceNumber, sequenceNumberCapacity, getSequenceNumberFromEntry); + super(lowestAllowedSequenceNumber, sequenceNumberCapacity, allowExpansion, getSequenceNumberFromEntry); } /** * {@inheritDoc} */ + @NonNull @Override protected SequenceMap buildMap( final long lowestAllowedSequenceNumber, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromEntry) { + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromEntry) { + + Objects.requireNonNull(getSequenceNumberFromEntry); + return new ConcurrentSequenceMap<>( - lowestAllowedSequenceNumber, sequenceNumberCapacity, getSequenceNumberFromEntry); + lowestAllowedSequenceNumber, sequenceNumberCapacity, allowExpansion, getSequenceNumberFromEntry); } } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/StandardSequenceSet.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/StandardSequenceSet.java index 961bbebf2fdb..4550281a2ce6 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/StandardSequenceSet.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/StandardSequenceSet.java @@ -19,46 +19,72 @@ import com.swirlds.common.sequence.map.SequenceMap; import com.swirlds.common.sequence.map.StandardSequenceMap; import com.swirlds.common.sequence.set.internal.AbstractSequenceSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; import 
java.util.function.ToLongFunction; /** * A lock free {@link SequenceSet}. * - * @param - * the type of the element contained within this set + * @param the type of the element contained within this set */ public class StandardSequenceSet extends AbstractSequenceSet { + /** + * Create a new lock free {@link SequenceSet} that does not permit expansion. + * + * @param lowestAllowedSequenceNumber the initial lowest permitted sequence in the set + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param getSequenceNumberFromEntry given an entry, extract the sequence number + */ + public StandardSequenceSet( + final long lowestAllowedSequenceNumber, + final int sequenceNumberCapacity, + @NonNull final ToLongFunction getSequenceNumberFromEntry) { + + super(lowestAllowedSequenceNumber, sequenceNumberCapacity, false, getSequenceNumberFromEntry); + } + /** * Create a new lock free {@link SequenceSet}. * - * @param lowestAllowedSequenceNumber - * the initial lowest permitted sequence in the set - * @param sequenceNumberCapacity - * the number of sequence numbers permitted to exist in this data structure. E.g. if - * the lowest allowed sequence number is 100 and the capacity is 10, then values with - * a sequence number between 100 and 109 (inclusive) will be allowed, and any value - * with a sequence number outside that range will be rejected. - * @param getSequenceNumberFromEntry - * given an entry, extract the sequence number + * @param lowestAllowedSequenceNumber the initial lowest permitted sequence in the set + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param allowExpansion if true, then instead of rejecting elements with a sequence number higher than + * the allowed by the current capacity, increase capacity and then insert the + * element. Does not expand if the sequence number is too low to fit in the + * current capacity. 
+ * @param getSequenceNumberFromEntry given an entry, extract the sequence number */ public StandardSequenceSet( final long lowestAllowedSequenceNumber, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromEntry) { + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromEntry) { - super(lowestAllowedSequenceNumber, sequenceNumberCapacity, getSequenceNumberFromEntry); + super(lowestAllowedSequenceNumber, sequenceNumberCapacity, allowExpansion, getSequenceNumberFromEntry); } /** * {@inheritDoc} */ + @NonNull @Override protected SequenceMap buildMap( final long lowestAllowedSequenceNumber, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromEntry) { + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromEntry) { + + Objects.requireNonNull(getSequenceNumberFromEntry); + return new StandardSequenceMap<>( - lowestAllowedSequenceNumber, sequenceNumberCapacity, getSequenceNumberFromEntry); + lowestAllowedSequenceNumber, sequenceNumberCapacity, allowExpansion, getSequenceNumberFromEntry); } } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/internal/AbstractSequenceSet.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/internal/AbstractSequenceSet.java index eee08c2be78e..1b765589edcc 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/internal/AbstractSequenceSet.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/sequence/set/internal/AbstractSequenceSet.java @@ -20,15 +20,16 @@ import com.swirlds.common.sequence.map.SequenceMap; import com.swirlds.common.sequence.set.SequenceSet; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; +import java.util.Objects; import java.util.function.Consumer; import java.util.function.ToLongFunction; /** * Boilerplate implementation for {@link SequenceSet}. * - * @param - * the type of the element contained within this set + * @param the type of the element contained within this set */ public abstract class AbstractSequenceSet implements SequenceSet { @@ -40,42 +41,47 @@ public abstract class AbstractSequenceSet implements SequenceSet { /** * Create a new abstract sequence set. * - * @param lowestAllowedSequenceNumber - * the initial lowest permitted sequence in the set - * @param sequenceNumberCapacity - * the number of sequence numbers permitted to exist in this data structure. E.g. if - * the lowest allowed sequence number is 100 and the capacity is 10, then values with - * a sequence number between 100 and 109 (inclusive) will be allowed, and any value - * with a sequence number outside that range will be rejected. - * @param getSequenceNumberFromEntry - * given an entry, extract the sequence number + * @param lowestAllowedSequenceNumber the initial lowest permitted sequence in the set + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param allowExpansion if true, then instead of rejecting elements with a sequence number higher than + * the allowed by the current capacity, increase capacity and then insert the + * element. 
+ * @param getSequenceNumberFromEntry given an entry, extract the sequence number */ protected AbstractSequenceSet( final long lowestAllowedSequenceNumber, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromEntry) { + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromEntry) { - map = buildMap(lowestAllowedSequenceNumber, sequenceNumberCapacity, getSequenceNumberFromEntry); + Objects.requireNonNull(getSequenceNumberFromEntry); + + map = buildMap(lowestAllowedSequenceNumber, sequenceNumberCapacity, allowExpansion, getSequenceNumberFromEntry); } /** * Build a map that is used to implement the set. * - * @param lowestAllowedSequenceNumber - * the initial lowest permitted sequence in the set - * @param sequenceNumberCapacity - * the number of sequence numbers permitted to exist in this data structure. E.g. if - * the lowest allowed sequence number is 100 and the capacity is 10, then values with - * a sequence number between 100 and 109 (inclusive) will be allowed, and any value - * with a sequence number outside that range will be rejected. - * @param getSequenceNumberFromEntry - * given an entry, extract the sequence number + * @param lowestAllowedSequenceNumber the initial lowest permitted sequence in the set + * @param sequenceNumberCapacity the number of sequence numbers permitted to exist in this data structure. E.g. + * if the lowest allowed sequence number is 100 and the capacity is 10, then + * values with a sequence number between 100 and 109 (inclusive) will be allowed, + * and any value with a sequence number outside that range will be rejected. + * @param allowExpansion if true, then instead of rejecting elements with a sequence number higher than + * the allowed by the current capacity, increase capacity and then insert the + * element. + * @param getSequenceNumberFromEntry given an entry, extract the sequence number * @return a sequence map */ + @NonNull protected abstract SequenceMap buildMap( final long lowestAllowedSequenceNumber, final int sequenceNumberCapacity, - final ToLongFunction getSequenceNumberFromEntry); + final boolean allowExpansion, + @NonNull final ToLongFunction getSequenceNumberFromEntry); /** * {@inheritDoc} diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/Address.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/Address.java index 4ca4a335d032..e886138d6e35 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/Address.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/Address.java @@ -677,35 +677,6 @@ public Address copy() { memo); } - /** - * Write the Address to the given stream. It should later be read from the stream with readAddress(). - * - * @param outStream the stream to write to. - * @throws IOException thrown if there any problems during operation - */ - @Deprecated - public void writeAddress(SerializableDataOutputStream outStream) throws IOException { - serialize(outStream); - } - - /** - * Return a new Address object read from the given stream. It should have been written to the stream with - * writeAddress(). - * - * @param inStream the stream to read from - * @param version the version of the serialized address - * @return the new Address object that was read. 
- * @throws IOException thrown if there are any problems in operation - * @deprecated 0.6.6 - */ - @Deprecated(forRemoval = true) - public static Address readAddress(SerializableDataInputStream inStream, long version) throws IOException { - Address address = new Address(); - address.deserialize(inStream, (int) version); - - return address; - } - /** * {@inheritDoc} */ diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/AddressBook.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/AddressBook.java index 2463e7c81ac2..1cbd1305246b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/AddressBook.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/system/address/AddressBook.java @@ -329,28 +329,14 @@ public Address getAddress(@NonNull final NodeId id) { return address; } - /** - * Get the address for the member with the given ID - * - * @param id the member ID of the address to get - * @return the address if it exists, null otherwise. - * @deprecated use {@link #getAddress(NodeId)} instead - */ - @Deprecated(since = "0.39.0", forRemoval = true) - @NonNull - public Address getAddress(final long id) { - return getAddress(new NodeId(id)); - } - /** * Check if an address for a given node ID is contained within this address book. * * @param id a node ID * @return true if this address book contains an address for the given node ID */ - public boolean contains(@NonNull final NodeId id) { - Objects.requireNonNull(id, "nodeId is null"); - return addresses.containsKey(id); + public boolean contains(@Nullable final NodeId id) { + return id != null && addresses.containsKey(id); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java index ce16ce23cdeb..4d5b8a456426 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java @@ -67,6 +67,12 @@ public abstract class AbstractQueueThreadConfiguration handler) { * @return this object */ @SuppressWarnings("unchecked") - public C setIdleCallback(@NonNull final InterruptableRunnable idleCallback) { + public C setIdleCallback(@Nullable final InterruptableRunnable idleCallback) { this.idleCallback = idleCallback; return (C) this; } @@ -200,6 +206,25 @@ public InterruptableRunnable getIdleCallback() { return idleCallback; } + /** + * Get a callback that should be invoked, if non-null, whenever a batch of elements has been handled. + */ + @Nullable + public InterruptableRunnable getBatchHandledCallback() { + return batchHandledCallback; + } + + /** + * Set a callback that should be invoked whenever a batch of elements has been handled. + * + * @return this object + */ + @SuppressWarnings("unchecked") + public C setBatchHandledCallback(@Nullable final InterruptableRunnable batchHandledCallback) { + this.batchHandledCallback = batchHandledCallback; + return (C) this; + } + /** * Get the amount of time that the thread blocks while waiting for work. 
*/ diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java index 3c3e8db2d742..ca0bdb1af345 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java @@ -33,8 +33,7 @@ /** * Implements a thread that continuously takes elements from a queue and handles them. * - * @param - * the type of the item in the queue + * @param the type of the item in the queue */ public class QueueThreadImpl extends AbstractBlockingQueue implements QueueThread { @@ -63,6 +62,11 @@ public class QueueThreadImpl extends AbstractBlockingQueue implements Queu */ private final InterruptableRunnable idleCallback; + /** + * If not null, called when a batch of work has been handled. + */ + private final InterruptableRunnable batchHandledCallback; + /** * The amount of time to wait for work. */ @@ -74,12 +78,11 @@ public class QueueThreadImpl extends AbstractBlockingQueue implements Queu *

 *
 * <p>
- * Unlike previous iterations of this class, this constructor DOES NOT start the background handler thread. - * Call {@link #start()} to start the handler thread. + * Unlike previous iterations of this class, this constructor DOES NOT start the background handler thread. Call + * {@link #start()} to start the handler thread. *

* - * @param configuration - * the configuration object + * @param configuration the configuration object */ public QueueThreadImpl(final AbstractQueueThreadConfiguration configuration) { super(ThreadBuildingUtils.getOrBuildQueue(configuration)); @@ -97,6 +100,7 @@ public QueueThreadImpl(final AbstractQueueThreadConfiguration configuratio buffer = new ArrayList<>(bufferSize); handler = configuration.getHandler(); idleCallback = configuration.getIdleCallback(); + batchHandledCallback = configuration.getBatchHandledCallback(); this.waitForWorkDuration = configuration.getWaitForWorkDuration(); metrics = new QueueThreadMetrics(configuration); @@ -109,15 +113,14 @@ public QueueThreadImpl(final AbstractQueueThreadConfiguration configuratio /** *

- * Build a "seed" that can be planted in a thread. When the runnable is executed, it takes over the calling - * thread - * and configures that thread the way it would configure a newly created thread. When work - * is finished, the calling thread is restored back to its original configuration. + * Build a "seed" that can be planted in a thread. When the runnable is executed, it takes over the calling thread + * and configures that thread the way it would configure a newly created thread. When work is finished, the calling + * thread is restored back to its original configuration. *

 *
 * <p>
- * Note that this seed will be unable to change the thread group of the calling thread, regardless of the - * thread group that is configured. + * Note that this seed will be unable to change the thread group of the calling thread, regardless of the thread + * group that is configured. *

 *
 * <p>
@@ -249,6 +252,7 @@ private void doWork() throws InterruptedException { metrics.startingWork(); if (item != null) { handler.accept(item); + batchHandled(); } return; } @@ -257,14 +261,23 @@ private void doWork() throws InterruptedException { handler.accept(item); } buffer.clear(); + batchHandled(); + } + + /** + * This method is called whenever a batch of work is completed. + */ + private void batchHandled() throws InterruptedException { + if (batchHandledCallback != null) { + batchHandledCallback.run(); + } } /** - * Wait a while for the next item to become available and return it. If no item becomes available before - * a timeout then return null. + * Wait a while for the next item to become available and return it. If no item becomes available before a timeout + * then return null. * - * @throws InterruptedException - * if this method is interrupted during execution + * @throws InterruptedException if this method is interrupted during execution */ private T waitForItem() throws InterruptedException { final T item = poll(waitForWorkDuration.toNanos(), NANOSECONDS); diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java index 97ea6640effe..1cb1c012612b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java @@ -25,13 +25,13 @@ /** *

- * A simple utility designed to limit the frequency of an event, e.g. making sure a particular log message - * isn't written too often. + * A simple utility designed to limit the frequency of an event, e.g. making sure a particular log message isn't written + * too often. *

 *
 * <p>
- * This object is not thread safe. This object was designed for simplicity and ease of use. This object - * may not be suitable for code pathways with extremely high performance requirements. + * This object is not thread safe. This object was designed for simplicity and ease of use. This object may not be + * suitable for code pathways with extremely high performance requirements. * </p>
*/ public class RateLimiter { @@ -59,10 +59,8 @@ public class RateLimiter { /** * Create a new rate limiter. * - * @param time - * provides the current time - * @param minimumPeriod - * the minimum time that must pass between operations + * @param time provides the current time + * @param minimumPeriod the minimum time that must pass between operations */ public RateLimiter(final Time time, final Duration minimumPeriod) { this.time = time; @@ -72,22 +70,21 @@ public RateLimiter(final Time time, final Duration minimumPeriod) { /** * Create a new rate limiter. * - * @param time - * provides the current time - * @param maxFrequency - * the maximum frequency of the operation, in hz + * @param time provides the current time + * @param maxFrequency the maximum frequency of the operation, in hz */ public RateLimiter(final Time time, final double maxFrequency) { this(time, Duration.ofNanos((long) (1.0 / maxFrequency * SECONDS_TO_NANOSECONDS))); } /** - * Request permission to perform an operation. Returns true if it is ok to perform the operation, - * returns false if the operation has been performed too recently in the past. + * Request permission to trigger an operation, and immediately trigger if permitted. Returns true if it is ok to + * perform the operation, returns false if the operation has been performed too recently in the past. Once this + * method returns true, it will return false for the remainder of the time span specified by the minimum period. * - * @return true if the operation can be performed without violating rate limits, otherwise false + * @return true if the operation can be triggered without violating rate limits, otherwise false */ - public boolean request() { + public boolean requestAndTrigger() { final Instant now = time.now(); final Duration elapsed = Duration.between(lastOperation, now); if (isGreaterThanOrEqualTo(elapsed, minimumPeriod)) { @@ -100,8 +97,38 @@ public boolean request() { } /** - * Get the number of times {@link #request()} has returned false since the last time it returned true. Immediately - * after {@link #request()} returns true, this method will always return 0. + * Check if it is legal to trigger the rate limited action. Unlike {@link #requestAndTrigger()}, this method can + * return true over and over in a time span smaller than the desired rate limit. In order to cause this method to + * return false for the remainder of the time span specified by the rate limit, call {@link #trigger()}. + * + * @return true if it is currently legal to trigger the rate limited action + */ + public boolean request() { + final Instant now = time.now(); + final Duration elapsed = Duration.between(lastOperation, now); + if (isGreaterThanOrEqualTo(elapsed, minimumPeriod)) { + deniedRequests = 0; + return true; + } + deniedRequests++; + return false; + } + + /** + * Trigger the action that is being rate limited. Calling this method will cause {@link #request()} and + * {@link #requestAndTrigger()} to return false for the remainder of the desired rate limit. This method + * does not actually check if enough time has passed to permit the action being triggered. Calling this method + * before the end of a rate limit period will reset the rate limit period. + */ + public void trigger() { + deniedRequests = 0; + lastOperation = time.now(); + } + + /** + * Get the number of times {@link #requestAndTrigger()} and/or {@link #request()} has returned false since the last + * time one of these methods returned true. 
Immediately after {@link #requestAndTrigger()} or {@link #request()} + * returns true, this method will return 0. * * @return the number of recently denied requests */ diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java index dc8839570b78..91db8c215e51 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java @@ -22,7 +22,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import com.swirlds.common.internal.SettingsCommon; import com.swirlds.common.metrics.Counter; import com.swirlds.common.metrics.DoubleGauge; import com.swirlds.common.metrics.FunctionGauge; @@ -54,20 +53,21 @@ class LegacyCsvWriterTest { private static final NodeId NODE_ID = new NodeId(42L); private Metrics metrics; private MetricsConfig metricsConfig; + private Configuration configuration; @TempDir private Path tempDir; @BeforeEach void setStandardSettings() { - final Configuration configuration = new TestConfigBuilder() + configuration = new TestConfigBuilder() .withValue("metrics.csvOutputFolder", tempDir.toString()) .withValue("metrics.csvAppend", "false") + .withValue("showInternalStats", "false") + .withValue("verboseStatistics", "false") .getOrCreateConfig(); metricsConfig = configuration.getConfigData(MetricsConfig.class); - SettingsCommon.showInternalStats = false; - SettingsCommon.verboseStatistics = false; final MetricKeyRegistry registry = mock(MetricKeyRegistry.class); when(registry.register(any(), any(), any())).thenReturn(true); metrics = new DefaultMetrics( @@ -77,7 +77,7 @@ void setStandardSettings() { @Test void testToString() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); // then assertThat(writer.toString()).matches("^LegacyCsvWriter\\[csvFilePath=" + tempDir + ".*]$"); @@ -88,7 +88,7 @@ void testParentFolderCreation() throws IOException { // given final Path grandParentPath = Files.createTempDirectory(tempDir, null); final Path parentPath = Files.createTempDirectory(grandParentPath, null); - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, parentPath, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, parentPath, configuration); final Path csvFilePath = writer.getCsvFilePath(); Files.deleteIfExists(csvFilePath); @@ -113,7 +113,7 @@ void testParentFolderCreation() throws IOException { @Test void testWriteDefault() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createCompleteList(); final List snapshots1 = metrics.stream() @@ -172,7 +172,7 @@ void testWriteDefault() throws IOException { @Test void testWritingOfSpecialValues() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createShortList(); final 
DoubleGauge gauge = (DoubleGauge) metrics.get(1); @@ -205,17 +205,17 @@ void testWritingOfSpecialValues() throws IOException { assertThat(content) .matches( """ - (.*\\n){5}.* - ,,0,0.0, - ,,0,0.0, - ,,0,0.0, - """); + (.*\\n){5}.* + ,,0,0.0, + ,,0,0.0, + ,,0,0.0, + """); } @Test void testWriteWithExistingFile() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); Files.writeString(csvFilePath, "Hello World"); final List metrics = createShortList(); @@ -250,8 +250,7 @@ void testWriteWithAppendedModeAndExistingFile() throws IOException { .withValue("metrics.csvOutputFolder", tempDir.toString()) .withValue("metrics.csvAppend", "true") .getOrCreateConfig(); - final MetricsConfig metricsConfig = configuration.getConfigData(MetricsConfig.class); - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); Files.writeString( csvFilePath, @@ -301,8 +300,7 @@ void testWriteWithAppendedModeAndNonExistingFile() throws IOException { .withValue("metrics.csvOutputFolder", tempDir.toString()) .withValue("metrics.csvAppend", "true") .getOrCreateConfig(); - final MetricsConfig metricsConfig = configuration.getConfigData(MetricsConfig.class); - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); Files.deleteIfExists(csvFilePath); final List metrics = createShortList(); @@ -333,7 +331,7 @@ void testWriteWithAppendedModeAndNonExistingFile() throws IOException { @Test void testWriteWithInternalIgnored() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createListWithInternals(); final List snapshots1 = metrics.stream() @@ -377,9 +375,10 @@ void testWriteWithInternalIgnored() throws IOException { @Test void testWriteWithInternalNotIgnored() throws IOException { + final Configuration configuration = + new TestConfigBuilder().withValue("showInternalStats", "true").getOrCreateConfig(); // given - SettingsCommon.showInternalStats = true; - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createListWithInternals(); final List snapshots1 = metrics.stream() @@ -426,7 +425,7 @@ void testWriteWithInternalNotIgnored() throws IOException { @Test void testWriteWithSecondaryValuesNotIncluded() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createListWithSecondaryValues(); final List snapshots1 = metrics.stream() @@ -473,8 +472,9 @@ void testWriteWithSecondaryValuesNotIncluded() throws IOException { @Test void testWriteWithSecondaryValuesIncluded() throws 
IOException { // given - SettingsCommon.verboseStatistics = true; - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final Configuration configuration = + new TestConfigBuilder().withValue("verboseStatistics", "true").getOrCreateConfig(); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createListWithSecondaryValues(); final List snapshots1 = metrics.stream() @@ -521,7 +521,7 @@ void testWriteWithSecondaryValuesIncluded() throws IOException { @Test void testBrokenFormatString() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final DoubleGauge gauge = metrics.getOrCreate(new DoubleGauge.Config(Metrics.PLATFORM_CATEGORY, "DoubleGauge") .withFormat("%d") @@ -543,7 +543,7 @@ void testBrokenFormatString() throws IOException { @Test void testChangedEntriesWithSimpleMetrics() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createSimpleList(); @@ -591,7 +591,7 @@ void testChangedEntriesWithSimpleMetrics() throws IOException { @Test void testChangedEntriesWithComplexMetricsAndNoSecondaryValues() throws IOException { // given - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createComplexList(); @@ -639,8 +639,9 @@ void testChangedEntriesWithComplexMetricsAndNoSecondaryValues() throws IOExcepti @Test void testChangedEntriesWithComplexMetricsAndSecondaryValues() throws IOException { // given - SettingsCommon.verboseStatistics = true; - final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); + final Configuration configuration = + new TestConfigBuilder().withValue("verboseStatistics", "true").getOrCreateConfig(); + final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, configuration); final Path csvFilePath = writer.getCsvFilePath(); final List metrics = createComplexList(); diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/system/status/logic/StatusLogicTestUtils.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/system/status/logic/StatusLogicTestUtils.java index 12c7c460fa00..90a199e9e77c 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/system/status/logic/StatusLogicTestUtils.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/system/status/logic/StatusLogicTestUtils.java @@ -22,8 +22,8 @@ import com.swirlds.common.system.status.IllegalPlatformStatusException; import com.swirlds.common.system.status.PlatformStatus; import com.swirlds.common.system.status.actions.PlatformStatusAction; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.function.Function; -import org.checkerframework.checker.nullness.qual.NonNull; /** * Utility methods for testing {@link PlatformStatusLogic} implementations. 
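For reviewers, a sketch of how the new allowExpansion flag added above to StandardSequenceMap, ConcurrentSequenceMap and the SequenceSet wrappers is intended to be used. The class below is illustrative and not part of this change: the Long-keyed map (each key doubles as its own sequence number), the chosen values, and the class name are hypothetical, while the constructor and method signatures are the ones shown in the hunks above.

import com.swirlds.common.sequence.map.SequenceMap;
import com.swirlds.common.sequence.map.StandardSequenceMap;

/**
 * Illustrative only: demonstrates the allowExpansion behaviour introduced in this change.
 */
public final class SequenceMapExpansionExample {
    public static void main(final String[] args) {
        // Window starts at sequence number 50 with a capacity of 8, i.e. sequence numbers 50-57.
        // allowExpansion = true means a key whose sequence number is above the window grows the
        // backing key-set array (expandCapacity sizes it to twice the required span) instead of
        // the insert being rejected.
        final SequenceMap<Long, String> map = new StandardSequenceMap<>(50, 8, true, Long::longValue);

        map.put(53L, "fits in the initial window");

        // 200 is above the window; with allowExpansion = false this put() would return null
        // without inserting. Here the capacity grows to 2 * (200 - 50) = 300 and the insert succeeds.
        map.put(200L, "forces the window to expand");

        // Expansion never happens downwards: a sequence number below the first one in the window
        // is still rejected, matching the "too low to fit" note in the javadoc above.
        final boolean inserted = map.putIfAbsent(10L, "below the window");

        System.out.println("capacity after expansion: " + map.getSequenceNumberCapacity()); // 300
        System.out.println("stale insert accepted: " + inserted); // false
    }
}

The same flag flows through ConcurrentSequenceSet and StandardSequenceSet via their new four-argument constructors, while the retained three-argument constructors delegate with allowExpansion = false and keep the old reject-only behaviour.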
diff --git a/platform-sdk/swirlds-logging/src/main/java/com/swirlds/logging/payloads/SavedStateLoadedPayload.java b/platform-sdk/swirlds-logging/src/main/java/com/swirlds/logging/payloads/SavedStateLoadedPayload.java index 5e8954e14822..f2d05978d442 100644 --- a/platform-sdk/swirlds-logging/src/main/java/com/swirlds/logging/payloads/SavedStateLoadedPayload.java +++ b/platform-sdk/swirlds-logging/src/main/java/com/swirlds/logging/payloads/SavedStateLoadedPayload.java @@ -16,7 +16,9 @@ package com.swirlds.logging.payloads; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; +import java.util.Objects; /** * This payload is logged when the platform loads a saved state from disk. @@ -25,13 +27,11 @@ public class SavedStateLoadedPayload extends AbstractLogPayload { private long round; private Instant consensusTimestamp; - private Instant willFreezeUntil; - public SavedStateLoadedPayload(final long round, final Instant consensusTimestamp, final Instant willFreezeUntil) { + public SavedStateLoadedPayload(final long round, @NonNull final Instant consensusTimestamp) { super("Platform has loaded a saved state"); this.round = round; - this.consensusTimestamp = consensusTimestamp; - this.willFreezeUntil = willFreezeUntil; + this.consensusTimestamp = Objects.requireNonNull(consensusTimestamp); } public long getRound() { @@ -49,12 +49,4 @@ public Instant getConsensusTimestamp() { public void setConsensusTimestamp(final Instant consensusTimestamp) { this.consensusTimestamp = consensusTimestamp; } - - public Instant getWillFreezeUntil() { - return willFreezeUntil; - } - - public void setWillFreezeUntil(final Instant willFreezeUntil) { - this.willFreezeUntil = willFreezeUntil; - } } diff --git a/platform-sdk/swirlds-platform-core/build.gradle.kts b/platform-sdk/swirlds-platform-core/build.gradle.kts index 8474ca024fc0..efa462a97ab2 100644 --- a/platform-sdk/swirlds-platform-core/build.gradle.kts +++ b/platform-sdk/swirlds-platform-core/build.gradle.kts @@ -15,41 +15,42 @@ */ plugins { - id("com.swirlds.platform.conventions") - id("com.swirlds.platform.library") - id("com.swirlds.platform.maven-publish") - id("org.gradle.java-test-fixtures") + id("com.swirlds.platform.conventions") + id("com.swirlds.platform.library") + id("com.swirlds.platform.maven-publish") + id("org.gradle.java-test-fixtures") } extraJavaModuleInfo { failOnMissingModuleInfo.set(false) } dependencies { - // Individual Dependencies - implementation(project(":swirlds-base")) - api(project(":swirlds-fchashmap")) - api(project(":swirlds-fcqueue")) - api(project(":swirlds-jasperdb")) - api(project(":swirlds-cli")) - api(project(":swirlds-base")) - compileOnly(libs.spotbugs.annotations) - runtimeOnly(project(":swirlds-config-impl")) + // Individual Dependencies + implementation(project(":swirlds-base")) + api(project(":swirlds-fchashmap")) + api(project(":swirlds-fcqueue")) + api(project(":swirlds-jasperdb")) + api(project(":swirlds-cli")) + api(project(":swirlds-base")) + compileOnly(libs.spotbugs.annotations) + runtimeOnly(project(":swirlds-config-impl")) - // Bundle Dependencies - implementation(libs.bundles.logging.impl) - implementation(libs.bundles.javafx) - implementation(libs.bundles.networking) - implementation(libs.bundles.picocli) - implementation(libs.bundles.jackson) + // Bundle Dependencies + implementation(libs.bundles.logging.impl) + implementation(libs.bundles.javafx) + implementation(libs.bundles.networking) + implementation(libs.bundles.picocli) + implementation(libs.bundles.jackson) - // Test 
Dependencies + // Test Dependencies + testCompileOnly(libs.spotbugs.annotations) - // These should not be implementation() based deps, but this requires refactoring to eliminate. - implementation(project(":swirlds-unit-tests:common:swirlds-common-test")) - implementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) + // These should not be implementation() based deps, but this requires refactoring to eliminate. + implementation(project(":swirlds-unit-tests:common:swirlds-common-test")) + implementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) - testImplementation(testLibs.bundles.junit) - testImplementation(testLibs.bundles.mocking) - testImplementation(testLibs.bundles.utils) - testImplementation(project(":swirlds-config-impl")) - testImplementation(testFixtures(project(":swirlds-common"))) + testImplementation(testLibs.bundles.junit) + testImplementation(testLibs.bundles.mocking) + testImplementation(testLibs.bundles.utils) + testImplementation(project(":swirlds-config-impl")) + testImplementation(testFixtures(project(":swirlds-common"))) } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java index e355767274c9..6c8f7a8e7f45 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java @@ -39,8 +39,6 @@ /** * This class only contains one method that was extracted from the {@link Browser} class. - * The method uses the {@link Settings} class in some special ways and will be replaced in future by the - * {@link com.swirlds.config.api.Configuration} API. * * @deprecated will be replaced by the {@link com.swirlds.config.api.Configuration} API in near future once the * config.txt has been migrated to the regular config API. 
If you need to use this class please try to do as less diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index a28b04e8a31a..1a8341d9c4d7 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -28,11 +28,13 @@ import static com.swirlds.platform.gui.internal.BrowserWindowManager.setInsets; import static com.swirlds.platform.gui.internal.BrowserWindowManager.setStateHierarchy; import static com.swirlds.platform.gui.internal.BrowserWindowManager.showBrowserWindow; +import static com.swirlds.platform.state.GenesisStateBuilder.buildGenesisState; import static com.swirlds.platform.state.address.AddressBookNetworkUtils.getLocalAddressCount; import static com.swirlds.platform.state.signed.ReservedSignedState.createNullReservation; import static com.swirlds.platform.state.signed.SignedStateFileReader.getSavedStateFiles; import static com.swirlds.platform.system.SystemExitCode.NODE_ADDRESS_MISMATCH; import static com.swirlds.platform.system.SystemExitUtils.exitSystem; +import static com.swirlds.platform.util.BootstrapUtils.detectSoftwareUpgrade; import com.swirlds.common.StartupTime; import com.swirlds.common.config.BasicConfig; @@ -58,6 +60,9 @@ import com.swirlds.common.io.config.RecycleBinConfig; import com.swirlds.common.io.config.TemporaryFileConfig; import com.swirlds.common.io.utility.RecycleBin; +import com.swirlds.common.merkle.MerkleNode; +import com.swirlds.common.merkle.crypto.MerkleCryptoFactory; +import com.swirlds.common.merkle.route.MerkleRouteIterator; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.MetricsProvider; @@ -79,6 +84,7 @@ import com.swirlds.jasperdb.config.JasperDbConfig; import com.swirlds.logging.payloads.NodeAddressMismatchPayload; import com.swirlds.logging.payloads.NodeStartPayload; +import com.swirlds.logging.payloads.SavedStateLoadedPayload; import com.swirlds.merkledb.config.MerkleDbConfig; import com.swirlds.platform.config.AddressBookConfig; import com.swirlds.platform.config.ThreadConfig; @@ -90,6 +96,7 @@ import com.swirlds.platform.crypto.CryptoConstants; import com.swirlds.platform.dispatch.DispatchConfiguration; import com.swirlds.platform.event.preconsensus.PreconsensusEventStreamConfig; +import com.swirlds.platform.event.tipset.EventCreationConfig; import com.swirlds.platform.gossip.chatter.config.ChatterConfig; import com.swirlds.platform.gossip.sync.config.SyncConfig; import com.swirlds.platform.gui.GuiPlatformAccessor; @@ -106,17 +113,18 @@ import com.swirlds.platform.portforwarding.PortMapping; import com.swirlds.platform.reconnect.emergency.EmergencySignedStateValidator; import com.swirlds.platform.recovery.EmergencyRecoveryManager; +import com.swirlds.platform.state.State; import com.swirlds.platform.state.address.AddressBookInitializer; import com.swirlds.platform.state.address.AddressBookNetworkUtils; import com.swirlds.platform.state.signed.ReservedSignedState; import com.swirlds.platform.state.signed.SavedStateInfo; +import com.swirlds.platform.state.signed.SignedState; import com.swirlds.platform.state.signed.SignedStateFileUtils; import com.swirlds.platform.swirldapp.AppLoaderException; import com.swirlds.platform.swirldapp.SwirldAppLoader; import 
com.swirlds.platform.system.Shutdown; import com.swirlds.platform.system.SystemExitCode; import com.swirlds.platform.uptime.UptimeConfig; -import com.swirlds.platform.util.BootstrapUtils; import com.swirlds.platform.util.MetricsDocUtils; import com.swirlds.virtualmap.config.VirtualMapConfig; import edu.umd.cs.findbugs.annotations.NonNull; @@ -244,6 +252,7 @@ private Browser(@NonNull final Set localNodesToStart) throws IOException .withConfigDataType(UptimeConfig.class) .withConfigDataType(RecycleBinConfig.class) .withConfigDataType(EventConfig.class) + .withConfigDataType(EventCreationConfig.class) .withConfigDataType(PathsConfig.class) .withConfigDataType(SocketConfig.class) .withConfigDataType(TransactionConfig.class); @@ -280,18 +289,6 @@ private Browser(@NonNull final Set localNodesToStart) throws IOException jframe.dispose(); } - // Read from data/settings.txt (where data is in same directory as .jar, usually sdk/) to change - // the default settings given in the Settings class. This file won't normally exist. But it can - // be used for testing and debugging. This is NOT documented for users. - // - // Also, if the settings.txt file exists, then after reading it and changing the settings, write - // all the current settings to settingsUsed.txt, some of which might have been changed by - // settings.txt - Settings.getInstance().loadSettings(); - - // Provide swirlds.common the settings it needs via the SettingsCommon class - Settings.populateSettingsCommon(); - // Write the settingsUsed.txt file writeSettingsUsed(configuration); @@ -407,8 +404,11 @@ private Map loadSwirldMains( final Map appMains = new HashMap<>(); final AddressBook addressBook = appDefinition.getAddressBook(); for (final Address address : addressBook) { - if (localNodesToStart.contains(address.getNodeId()) || AddressBookNetworkUtils.isLocal(address)) { - appMains.put(address.getNodeId(), buildAppMain(appDefinition, appLoader)); + if (AddressBookNetworkUtils.isLocal(address)) { + // if the local nodes to start are not specified, start all local nodes. Otherwise, start specified. 
+ if (localNodesToStart.isEmpty() || localNodesToStart.contains(address.getNodeId())) { + appMains.put(address.getNodeId(), buildAppMain(appDefinition, appLoader)); + } } } return appMains; @@ -428,7 +428,7 @@ private void writeSettingsUsed(final Configuration configuration) { // Add all settings values to the string builder final PathsConfig pathsConfig = configuration.getConfigData(PathsConfig.class); if (Files.exists(pathsConfig.getSettingsPath())) { - Settings.getInstance().addSettingsUsed(settingsUsedBuilder); + PlatformConfigUtils.generateSettingsUsed(settingsUsedBuilder, configuration); } settingsUsedBuilder.append(System.lineSeparator()); @@ -439,7 +439,8 @@ private void writeSettingsUsed(final Configuration configuration) { ConfigExport.addConfigContents(configuration, settingsUsedBuilder); // Write the settingsUsed.txt file - final Path settingsUsedPath = pathsConfig.getSettingsUsedDir().resolve(SettingConstants.SETTING_USED_FILENAME); + final Path settingsUsedPath = + pathsConfig.getSettingsUsedDir().resolve(PlatformConfigUtils.SETTING_USED_FILENAME); try (final OutputStream outputStream = new FileOutputStream(settingsUsedPath.toFile())) { outputStream.write(settingsUsedBuilder.toString().getBytes(StandardCharsets.UTF_8)); } catch (final IOException | RuntimeException e) { @@ -545,8 +546,7 @@ public static synchronized void launch(final Set localNodesToStart, fina } /** - * Instantiate and start the thread dump generator, if enabled via the {@link Settings#getThreadDumpPeriodMs()} - * setting. + * Instantiate and start the thread dump generator. */ private void startThreadDumpGenerator() { final ThreadConfig threadConfig = configuration.getConfigData(ThreadConfig.class); @@ -661,55 +661,55 @@ private Collection createLocalPlatforms( final EmergencyRecoveryManager emergencyRecoveryManager = new EmergencyRecoveryManager(shutdown::shutdown, basicConfig.getEmergencyRecoveryFileLoadDir()); - final ReservedSignedState loadedSignedState = getUnmodifiedSignedStateFromDisk( + final ReservedSignedState initialState = getInitialState( platformContext, + appMain, mainClassName, swirldName, nodeId, - appVersion, - addressBook.copy(), + addressBook, emergencyRecoveryManager); - // check software version compatibility - final boolean softwareUpgrade = - BootstrapUtils.detectSoftwareUpgrade(appVersion, loadedSignedState.getNullable()); - - if (softwareUpgrade) { - try { - logger.info(STARTUP.getMarker(), "Clearing recycle bin as part of software upgrade workflow."); - recycleBin.clear(); - } catch (final IOException e) { - throw new UncheckedIOException("Failed to clear recycle bin", e); + final SwirldsPlatform platform; + try (initialState) { + // check software version compatibility + final boolean softwareUpgrade = detectSoftwareUpgrade(appVersion, initialState.get()); + + if (softwareUpgrade) { + try { + logger.info( + STARTUP.getMarker(), "Clearing recycle bin as part of software upgrade workflow."); + recycleBin.clear(); + } catch (final IOException e) { + throw new UncheckedIOException("Failed to clear recycle bin", e); + } } - } - // Initialize the address book from the configuration and platform saved state. - final AddressBookInitializer addressBookInitializer = new AddressBookInitializer( - appVersion, - softwareUpgrade, - loadedSignedState.getNullable(), - addressBook.copy(), - platformContext); + // Initialize the address book from the configuration and platform saved state. 
+ final AddressBookInitializer addressBookInitializer = new AddressBookInitializer( + appVersion, softwareUpgrade, initialState.get(), addressBook.copy(), platformContext); - // set here, then given to the state in run(). A copy of it is given to hashgraph. - final AddressBook initialAddressBook = addressBookInitializer.getInitialAddressBook(); + if (!initialState.get().isGenesisState()) { + updateLoadedStateAddressBook( + initialState.get(), addressBookInitializer.getInitialAddressBook()); + } - GuiPlatformAccessor.getInstance().setPlatformName(nodeId, platformName); - GuiPlatformAccessor.getInstance().setSwirldId(nodeId, appDefinition.getSwirldId()); - GuiPlatformAccessor.getInstance().setInstanceNumber(nodeId, instanceNumber); + GuiPlatformAccessor.getInstance().setPlatformName(nodeId, platformName); + GuiPlatformAccessor.getInstance().setSwirldId(nodeId, appDefinition.getSwirldId()); + GuiPlatformAccessor.getInstance().setInstanceNumber(nodeId, instanceNumber); + + platform = new SwirldsPlatform( + platformContext, + crypto.get(nodeId), + recycleBin, + nodeId, + mainClassName, + swirldName, + appVersion, + initialState.get(), + emergencyRecoveryManager); + } - final SwirldsPlatform platform = new SwirldsPlatform( - platformContext, - crypto.get(nodeId), - recycleBin, - initialAddressBook, - nodeId, - mainClassName, - swirldName, - appVersion, - appMain::newState, - loadedSignedState, - emergencyRecoveryManager); platforms.add(platform); new InfoMember(infoSwirld, instanceNumber, platform); @@ -737,6 +737,91 @@ private Collection createLocalPlatforms( return Collections.unmodifiableList(platforms); } + /** + * Update the address book with the current address book read from config.txt. Eventually we will not do this, and + * only transactions will be capable of modifying the address book. + * + * @param signedState the state that was loaded from disk + * @param addressBook the address book specified in config.txt + */ + private static void updateLoadedStateAddressBook( + @NonNull final SignedState signedState, @NonNull final AddressBook addressBook) { + + final State state = signedState.getState(); + + // Update the address book with the current address book read from config.txt. + // Eventually we will not do this, and only transactions will be capable of + // modifying the address book. + state.getPlatformState().setAddressBook(addressBook.copy()); + + // Invalidate a path down to the new address book + new MerkleRouteIterator(state, state.getPlatformState().getAddressBook().getRoute()) + .forEachRemaining(MerkleNode::invalidateHash); + + // We should only have to rehash a few nodes, so simpler to use the synchronous algorithm. + MerkleCryptoFactory.getInstance().digestTreeSync(state); + + // If our hash changes as a result of the new address book then our old signatures may become invalid. + signedState.pruneInvalidSignatures(); + } + + /** + * Get the initial state to be used by this node. May return a state loaded from disk, or may return a genesis state + * if no valid state is found on disk. 
+ * + * @param platformContext the platform context + * @param appMain the app main + * @param mainClassName the name of the app's SwirldMain class + * @param swirldName the name of this swirld + * @param selfId the node id of this node + * @param configAddressBook the address book from config.txt + * @param emergencyRecoveryManager the emergency recovery manager + * @return the initial state to be used by this node + */ + @NonNull + private ReservedSignedState getInitialState( + @NonNull final PlatformContext platformContext, + @NonNull final SwirldMain appMain, + @NonNull final String mainClassName, + @NonNull final String swirldName, + @NonNull final NodeId selfId, + @NonNull final AddressBook configAddressBook, + @NonNull final EmergencyRecoveryManager emergencyRecoveryManager) { + + Objects.requireNonNull(platformContext); + Objects.requireNonNull(mainClassName); + Objects.requireNonNull(swirldName); + Objects.requireNonNull(selfId); + Objects.requireNonNull(configAddressBook); + Objects.requireNonNull(emergencyRecoveryManager); + + final ReservedSignedState loadedState = getUnmodifiedSignedStateFromDisk( + platformContext, + mainClassName, + swirldName, + selfId, + appMain.getSoftwareVersion(), + configAddressBook, + emergencyRecoveryManager); + + if (loadedState.isNotNull()) { + logger.info( + STARTUP.getMarker(), + new SavedStateLoadedPayload( + loadedState.get().getRound(), loadedState.get().getConsensusTimestamp())); + return loadedState; + } + + // Not strictly necessary to close a null reservation, but it's nice to be consistent. + loadedState.close(); + + final State genesisState = + buildGenesisState(platformContext, configAddressBook, appMain.getSoftwareVersion(), appMain.newState()); + + final SignedState signedState = new SignedState(platformContext, genesisState, "genesis state"); + return signedState.reserve("genesis state"); + } + /** * Load the signed state from the disk if it is present. * diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusImpl.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusImpl.java index 8e8e4a3c73ee..192c801363e0 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusImpl.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusImpl.java @@ -22,11 +22,13 @@ import com.swirlds.common.config.ConsensusConfig; import com.swirlds.common.crypto.Hash; +import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.platform.event.EventUtils; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.metrics.ConsensusMetrics; import com.swirlds.platform.state.signed.SignedState; +import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Instant; import java.util.ArrayDeque; import java.util.ArrayList; @@ -515,25 +517,28 @@ private List vote(EventImpl event, RoundInfo roundInfo, ArrayList cons = new LinkedList<>(); // all events reaching consensus now, in consensus order - int voterId = (int) event.getCreatorId().id(); + final NodeId voterId = event.getCreatorId(); + final int voterIndex = addressBook.getIndexOfNodeId(voterId); for (RoundInfo.ElectionRound election = roundInfo.elections; election != null; election = election.nextElection) { // for all elections if (election.age == 1) { // first round of an election. Vote TRUE for self-ancestors of those you firstSee. Don't decide. 
- EventImpl w = firstSee(event, election.event.getCreatorId().id()); + final int electionEventCreatorIdIndex = addressBook.getIndexOfNodeId(election.event.getCreatorId()); + EventImpl w = firstSee(event, electionEventCreatorIdIndex); while (w != null && w.getRoundCreated() > event.getRoundCreated() - 1 && w.getSelfParent() != null) { w = firstSelfWitnessS(w.getSelfParent()); } - election.vote[voterId] = (election.event == w); + election.vote[voterIndex] = (election.event == w); } else { // either a coin round or normal round, so count votes from witnesses you strongly see long yesWeight = 0; // total weight of all members voting yes long noWeight = 0; // total weight of all members voting yes for (EventImpl w : stronglySeen) { - int id = (int) w.getCreatorId().id(); - long weight = addressBook.getAddress(w.getCreatorId()).getWeight(); - if (election.prevRound.vote[id]) { + final NodeId id = w.getCreatorId(); + final long weight = addressBook.getAddress(id).getWeight(); + final int nodeIndex = addressBook.getIndexOfNodeId(id); + if (election.prevRound.vote[nodeIndex]) { yesWeight += weight; } else { noWeight += weight; @@ -543,16 +548,16 @@ private List vote(EventImpl event, RoundInfo roundInfo, ArrayList= noWeight); + election.vote[voterIndex] = (yesWeight >= noWeight); if ((election.age % config.coinFreq()) == 0) { // a coin round. Vote randomly unless you strongly see a supermajority. Don't decide. numCoinRounds++; if (!superMajority) { if ((election.age % (2 * config.coinFreq())) == config.coinFreq()) { - election.vote[voterId] = true; // every other "coin round" is just coin=true + election.vote[voterIndex] = true; // every other "coin round" is just coin=true } else { // coin is one bit from signature (LSB of second of two middle bytes) - election.vote[voterId] = coin(event); + election.vote[voterIndex] = coin(event); } } } else { @@ -564,7 +569,7 @@ private List vote(EventImpl event, RoundInfo roundInfo, ArrayList c = setFamous( election.event, rounds.get(election.event.getRoundCreated()), - election.vote[voterId], + election.vote[voterIndex], election); if (c != null) { cons.addAll(c); @@ -638,7 +643,7 @@ private RoundInfo setRoundCreated(EventImpl event, ArrayList strongly long round; round(event); // find the round, and store it using event.setRoundCreated() - for (long m = 0; m < numMembers; m++) { + for (int m = 0; m < numMembers; m++) { EventImpl s = stronglySeeS1(event, m); if (s != null) { stronglySeen.add(s); @@ -663,7 +668,7 @@ private RoundInfo getOrCreateRoundInfo(long round) { if (roundInfo != null) { return roundInfo; } - roundInfo = new RoundInfo(round, addressBook.getSize()); + roundInfo = new RoundInfo(round, addressBook); rounds.put(round, roundInfo); // create elections in this round based on the previous one @@ -1111,7 +1116,7 @@ private long parentRound(EventImpl x) { } /** - * The last event created by m that is an ancestor of x (function from SWIRLDS-TR-2020-01). + * The last event created by the member at index m that is an ancestor of x (function from SWIRLDS-TR-2020-01). * This has aggressive memoization: the first time it is called with a given x, it immediately calculates and stores * results for all m. * This result is memoized. 
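The ConsensusImpl hunks above replace raw node ids, previously cast to int for array indexing, with address-book indices. A minimal sketch of the assumed pattern, using only calls that already appear in this diff (AddressBook.getSize, AddressBook.getIndexOfNodeId, AddressBook.getNodeId, and getAddress(...).getWeight()); the sketch class and its methods are hypothetical:

import com.swirlds.common.system.NodeId;
import com.swirlds.common.system.address.AddressBook;

final class VoteIndexingSketch {

    // Record a yes vote for an event creator. Vote arrays are sized by the address book and
    // indexed by the creator's address-book index, not by the raw NodeId value, which may be
    // sparse once nodes join or leave the network.
    static void recordYesVote(final AddressBook addressBook, final NodeId creator, final boolean[] vote) {
        final int creatorIndex = addressBook.getIndexOfNodeId(creator);
        vote[creatorIndex] = true;
    }

    // Sum the weight behind the yes votes. Iteration runs over indices, while weight lookups
    // translate each index back to a NodeId before consulting the address book.
    static long yesWeight(final AddressBook addressBook, final boolean[] vote) {
        long weight = 0;
        for (int index = 0; index < addressBook.getSize(); index++) {
            if (vote[index]) {
                final NodeId nodeId = addressBook.getNodeId(index);
                weight += addressBook.getAddress(nodeId).getWeight();
            }
        }
        return weight;
    }
}

The same index-based sizing is applied to the RoundInfo judges array later in this diff.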
@@ -1119,10 +1124,10 @@ private long parentRound(EventImpl x) { * @param x * the event being queried * @param m - * the member ID of the creator - * @return the last event created by m that is an ancestor of x, or null if none + * the index of the member ID of the creator + * @return the last event created by member at index m that is an ancestor of x, or null if none */ - private EventImpl lastSee(EventImpl x, long m) { + private EventImpl lastSee(@Nullable final EventImpl x, final int m) { int numMembers; EventImpl sp, op; @@ -1130,7 +1135,7 @@ private EventImpl lastSee(EventImpl x, long m) { return null; } if (x.sizeLastSee() != 0) { // return memoized answer, if available - return x.getLastSee((int) m); + return x.getLastSee(m); } // memoize answers for all choices of m, then return answer for just this m numMembers = getAddressBook().getSize(); @@ -1140,7 +1145,8 @@ private EventImpl lastSee(EventImpl x, long m) { sp = x.getSelfParent(); for (int mm = 0; mm < numMembers; mm++) { - if (x.getCreatorId().id() == mm) { + final int xCreatorIdIndex = addressBook.getIndexOfNodeId(x.getCreatorId()); + if (xCreatorIdIndex == mm) { x.setLastSee(mm, x); } else if (sp == null && op == null) { x.setLastSee(mm, null); @@ -1156,27 +1162,28 @@ private EventImpl lastSee(EventImpl x, long m) { } } } - return x.getLastSee((int) m); + return x.getLastSee(m); } /** - * The witness y created by m that is seen by event x through an event z created by m2 (function from + * The witness y created by the member at index m that is seen by event x through an event z created by the member at index m2 (function from * SWIRLDS-TR-2020-01). * This result is not memoized. * * @param x * the event being queried * @param m - * the creator of y, the event seen + * the index of the creator of y, the event seen * @param m2 - * the creator of z, the intermediate event through which x sees y - * @return the event y that is created by m and seen by x through an event by m2 + * the index of the creator of z, the intermediate event through which x sees y + * @return the event y that is created by the member at index m and seen by x through an event by the member at index m2 */ - private EventImpl seeThru(EventImpl x, long m, long m2) { + private EventImpl seeThru(@Nullable final EventImpl x, final int m, final int m2) { if (x == null) { return null; } - if (m == m2 && m2 == x.getCreatorId().id()) { + final int creatorIndex = addressBook.getIndexOfNodeId(x.getCreatorId()); + if (m == m2 && m2 == creatorIndex) { return firstSelfWitnessS(x.getSelfParent()); } return firstSee(lastSee(x, m2), m); @@ -1193,17 +1200,17 @@ private EventImpl seeThru(EventImpl x, long m, long m2) { * @param x * the event being queried * @param m - * the member ID of the creator - * @return witness created by m in the parent round of x that x strongly sees, or null if none + * the index of the member ID of the creator + * @return witness created by the member at index m in the parent round of x that x strongly sees, or null if none */ - private EventImpl stronglySeeP(EventImpl x, long m) { + private EventImpl stronglySeeP(@Nullable final EventImpl x, final int m) { long t = System.nanoTime(); // Used to update statistic for dot product time EventImpl result; // the witness to return (possibly null) if (x == null) { // if there is no event, then it can't see anything result = null; } else if (x.sizeStronglySeeP() != 0) { // return memoized answer, if available - result = x.getStronglySeeP((int) m); + result = x.getStronglySeeP(m); } else { // calculate the 
answer, and remember it for next time // find and memoize answers for all choices of m, then return answer for just this m int numMembers = getAddressBook().getSize(); // number of members @@ -1227,9 +1234,10 @@ private EventImpl stronglySeeP(EventImpl x, long m) { x.setStronglySeeP(mm, null); } else { long weight = 0; - for (long m3 = 0; m3 < numMembers; m3++) { + for (int m3 = 0; m3 < numMembers; m3++) { if (seeThru(x, mm, m3) == st) { // only count intermediates that see the canonical witness - weight += addressBook.getAddress(m3).getWeight(); + final NodeId nodeId = addressBook.getNodeId(m3); + weight += addressBook.getAddress(nodeId).getWeight(); } } if (Utilities.isSuperMajority(weight, totalWeight)) { // strongly see supermajority of @@ -1241,7 +1249,7 @@ private EventImpl stronglySeeP(EventImpl x, long m) { } } } - result = x.getStronglySeeP((int) m); + result = x.getStronglySeeP(m); } t = System.nanoTime() - t; // nanoseconds spent doing the dot product consensusMetrics.dotProductTime(t); @@ -1359,9 +1367,10 @@ private long round(EventImpl x) { // parents have equal rounds (not -1), so check if x can strongly see witnesses with a supermajority of weight weight = 0; int numStronglySeen = 0; - for (long m = 0; m < numMembers; m++) { + for (int m = 0; m < numMembers; m++) { if (stronglySeeP(x, m) != null) { - weight += addressBook.getAddress(m).getWeight(); + final NodeId nodeId = addressBook.getNodeId(m); + weight += addressBook.getAddress(nodeId).getWeight(); numStronglySeen++; } } @@ -1435,28 +1444,23 @@ private EventImpl firstWitnessS(EventImpl x) { * @param x * the event being queried * @param m - * the member ID of the creator - * @return event by m that x strongly sees in the round before the created round of x, or null if none + * the index of the member ID of the creator + * @return event by the member at index m that x strongly sees in the round before the created round of x, or null if none */ - private EventImpl stronglySeeS1(EventImpl x, long m) { + private EventImpl stronglySeeS1(EventImpl x, int m) { return stronglySeeP(firstWitnessS(x), m); } /** - * The first witness in round r that is a self-ancestor of x, where r is the round of the last event by m - * that is - * seen by x (function from SWIRLDS-TR-2020-01). - * This result is not memoized. + * The first witness in round r that is a self-ancestor of x, where r is the round of the last event by the member + * at index m that is seen by x (function from SWIRLDS-TR-2020-01). This result is not memoized. 
* - * @param x - * the event being queried - * @param m - * the member ID of the creator - * @return firstSelfWitnessS(lastSee ( x, m)), which is the first witness in round r that is a - * self-ancestor - * of x, where r is the round of the last event by m that is seen by x, or null if none + * @param x the event being queried + * @param m the index of the member ID of the creator + * @return firstSelfWitnessS(lastSee ( x, m)), which is the first witness in round r that is a self-ancestor of x, + * where r is the round of the last event by the member at index m that is seen by x, or null if none */ - private EventImpl firstSee(EventImpl x, long m) { + private EventImpl firstSee(@Nullable final EventImpl x, final int m) { return firstSelfWitnessS(lastSee(x, m)); } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusRounds.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusRounds.java index 6cda513d2982..c0283685e210 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusRounds.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ConsensusRounds.java @@ -284,7 +284,7 @@ void createRoundsForSignedStateConstructor(final List minGen) { maxRound.set(minGen.get(minGen.size() - 1).round()); for (final MinGenInfo roundGenPair : minGen) { long round = roundGenPair.round(); - RoundInfo roundInfo = new RoundInfo(round, addressBook.getSize()); + RoundInfo roundInfo = new RoundInfo(round, addressBook); rounds.put(round, roundInfo); // set the minGeneration as stored in state diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java index 4e1f0a26aa05..8e388e925626 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java @@ -159,6 +159,7 @@ static QueueThread stateHashSignQueue( * @param metrics reference to the metrics-system * @param transactionConfig the transaction configuration * @param initialState the initial state + * @param softwareVersion the software version * @return the newly constructed instance of {@link SwirldStateManager} */ static SwirldStateManager swirldStateManager( @@ -170,7 +171,8 @@ static SwirldStateManager swirldStateManager( @NonNull final Metrics metrics, @NonNull final TransactionConfig transactionConfig, @NonNull final BooleanSupplier inFreezeChecker, - @NonNull final State initialState) { + @NonNull final State initialState, + @NonNull final SoftwareVersion softwareVersion) { Objects.requireNonNull(platformContext); Objects.requireNonNull(addressBook); @@ -181,6 +183,7 @@ static SwirldStateManager swirldStateManager( Objects.requireNonNull(transactionConfig); Objects.requireNonNull(inFreezeChecker); Objects.requireNonNull(initialState); + Objects.requireNonNull(softwareVersion); return new SwirldStateManagerImpl( platformContext, @@ -191,7 +194,8 @@ static SwirldStateManager swirldStateManager( new SwirldStateMetrics(metrics), transactionConfig, inFreezeChecker, - initialState); + initialState, + softwareVersion); } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/RoundInfo.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/RoundInfo.java index 8afe54744606..926506dc57db 100644 
--- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/RoundInfo.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/RoundInfo.java @@ -16,12 +16,15 @@ package com.swirlds.platform; +import com.swirlds.common.system.address.AddressBook; import com.swirlds.platform.crypto.CryptoConstants; import com.swirlds.platform.event.EventConstants; import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * Hold all of the information about a round, such as lists of witnesses, statistics about them, and all the @@ -49,6 +52,11 @@ public class RoundInfo { */ private final long round; + /** + * the address book for this round + */ + private final AddressBook addressBook; + /** * are all the famous witnesses known for this round? */ @@ -172,12 +180,13 @@ public long getAge() { * * @param round * the round it will be used to describe - * @param numMembers - * the number of members currently in the address book + * @param addressBook + * the address book to use for the round */ - protected RoundInfo(long round, int numMembers) { + protected RoundInfo(long round, @NonNull final AddressBook addressBook) { this.round = round; - this.judges = new EventImpl[numMembers]; + this.addressBook = Objects.requireNonNull(addressBook); + this.judges = new EventImpl[addressBook.getSize()]; } /** @@ -187,17 +196,17 @@ protected RoundInfo(long round, int numMembers) { * the witness to add */ protected void addFamousWitness(EventImpl w) { - int creator = (int) w.getCreatorId().id(); - if (judges[creator] == null) { - judges[creator] = w; + final int creatorIndex = addressBook.getIndexOfNodeId(w.getCreatorId()); + if (judges[creatorIndex] == null) { + judges[creatorIndex] = w; } else { // if this creator forked, then the judge is the "unique" famous witness, which is the one with minimum hash // (where "minimum" is the lexicographically-least signed byte array) if (Utilities.arrayCompare( w.getBaseHash().getValue(), - judges[creator].getBaseHash().getValue()) + judges[creatorIndex].getBaseHash().getValue()) < 0) { - judges[creator] = w; + judges[creatorIndex] = w; } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java deleted file mode 100644 index bcef431371a2..000000000000 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (C) 2022-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.swirlds.platform; - -/** - * @deprecated will be replaced by the {@link com.swirlds.config.api.Configuration} API in near future. If you need - * to use this class please try to do as less static access as possible. 
- */ -@Deprecated(forRemoval = true) -public final class SettingConstants { - - /** name of the settings used file */ - static final String SETTING_USED_FILENAME = "settingsUsed.txt"; - - static final int DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE = 1000; - static final boolean VERIFY_EVENT_SIGS_DEFAULT_VALUE = true; - static final boolean SHOW_INTERNAL_STATS_DEFAULT_VALUE = false; - static final boolean VERBOSE_STATISTICS_DEFAULT_VALUE = false; - static final int STATS_BUFFER_SIZE_DEFAULT_VALUE = 100; - static final int STATS_RECENT_SECONDS_DEFAULT_VALUE = 63; - static final boolean LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE = true; - - private SettingConstants() {} -} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java deleted file mode 100644 index 4429158bb410..000000000000 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java +++ /dev/null @@ -1,411 +0,0 @@ -/* - * Copyright (C) 2017-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.swirlds.platform; - -import static com.swirlds.common.io.utility.FileUtils.getAbsolutePath; -import static com.swirlds.common.settings.ParsingUtils.parseDuration; -import static com.swirlds.logging.LogMarker.EXCEPTION; -import static com.swirlds.platform.SettingConstants.DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.SHOW_INTERNAL_STATS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.STATS_BUFFER_SIZE_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.STATS_RECENT_SECONDS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.VERBOSE_STATISTICS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.VERIFY_EVENT_SIGS_DEFAULT_VALUE; - -import com.swirlds.common.config.PathsConfig; -import com.swirlds.common.config.singleton.ConfigurationHolder; -import com.swirlds.common.internal.SettingsCommon; -import com.swirlds.common.settings.SettingsException; -import com.swirlds.common.utility.CommonUtils; -import com.swirlds.common.utility.PlatformVersion; -import com.swirlds.config.api.Configuration; -import com.swirlds.platform.internal.SubSetting; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; -import java.util.Scanner; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -/** - * This purely-static class holds global settings that control how the Platform and sync processes operate. 
If the file - * sdk/settings.txt exists, then it will read the settings from it, to override one or more of the default settings (and - * to override settings in config.txt). The Browser should call the loadSettings() method to read that file, before it - * instantiates any Platform objects (or anything else). - *

- * Any field that is intended to be a "setting" should be non-final. The settings.txt file will not change any of the - * fields. But it will change all of the final fields (except maxIncomingSyncs, which is a special case which is - * calculated from maxOutgoingSyncs, and cannot be changed directly from settings.txt). - *

- * After the config.txt and settings.txt files have been read and the Platform objects instantiated, the Browser should - * then call writeSettings() to write all the final settings values to settingsUsed.txt (though only if settings.txt - * exists). - * - * @deprecated will be replaced by the {@link Configuration} API in near future. If you need to use this class please - * try to do as less static access as possible. - */ -@Deprecated(forRemoval = true) -public class Settings { - - // The following paths are for 4 files and 2 directories, such as: - // /FULL/PATH/sdk/config.txt - // /FULL/PATH/sdk/settings.txt - // /FULL/PATH/sdk/settingsUsed.txt - // /FULL/PATH/sdk/log4j2.xml - // /FULL/PATH/sdk/data/keys/ - // /FULL/PATH/sdk/data/apps/ - - // useful run configuration arguments for debugging: - // -XX:+HeapDumpOnOutOfMemoryError - // -Djavax.net.debug=ssl,handshake - - /** use this for all logging, as controlled by the optional data/log4j2.xml file */ - private static final Logger logger = LogManager.getLogger(Settings.class); - - private static final Settings INSTANCE = new Settings(); - /** the directory where the settings used file will be created on startup if and only if settings.txt exists */ - private final Path settingsUsedDir = getAbsolutePath(); - - /////////////////////////////////////////// - // settings from settings.txt file - /** verify event signatures (rather than just trusting they are correct)? */ - private boolean verifyEventSigs = VERIFY_EVENT_SIGS_DEFAULT_VALUE; - /** show the user all statistics, including those with category "internal"? */ - private boolean showInternalStats = SHOW_INTERNAL_STATS_DEFAULT_VALUE; - /** show expand statistics values, inlcude mean, min, max, stdDev */ - private boolean verboseStatistics = VERBOSE_STATISTICS_DEFAULT_VALUE; - /** check for deadlocks every this many milliseconds (-1 for never) */ - private int deadlockCheckPeriod = DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; - /** number of bins to store for the history (in StatsBuffer etc.) */ - private int statsBufferSize = STATS_BUFFER_SIZE_DEFAULT_VALUE; - /** number of seconds covered by "recent" history (in StatsBuffer etc.) */ - private double statsRecentSeconds = STATS_RECENT_SECONDS_DEFAULT_VALUE; - - /** - * When enabled, the platform will try to load node keys from .pfx files located in - * {@link com.swirlds.common.config.PathsConfig.keysDirPath}. If even a - * single key is missing, the platform will warn and exit. - *

- * If disabled, the platform will generate keys deterministically. - */ - private boolean loadKeysFromPfxFiles = LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE; - - private Settings() {} - - public static Settings getInstance() { - return INSTANCE; - } - - public static void main(final String[] args) { - getInstance().loadSettings(); - getInstance().writeSettingsUsed(); - } - - public static void populateSettingsCommon() { - SettingsCommon.showInternalStats = getInstance().isShowInternalStats(); - SettingsCommon.verboseStatistics = getInstance().isVerboseStatistics(); - } - - /** - * Split the given string on its commas, and trim each result - * - * @param line the string of comma-separated values to split - * @return the array of trimmed elements. - */ - public static String[] splitLine(final String line) { - final String[] elms = line.split(","); - for (int i = 0; i < elms.length; i++) { - elms[i] = elms[i].trim(); - } - - return elms; - } - - public void writeSettingsUsed() { - writeSettingsUsed(settingsUsedDir); - } - - public void addSettingsUsed(final StringBuilder builder) { - final String[][] settings = currSettings(); - builder.append(PlatformVersion.locateOrDefault().license()); - builder.append(System.lineSeparator()); - builder.append(System.lineSeparator()); - - builder.append( - "The following are all the settings, as modified by settings.txt, but not reflecting any changes " - + "made by config.txt."); - builder.append(System.lineSeparator()); - builder.append(System.lineSeparator()); - for (final String[] pair : settings) { - builder.append(String.format("%15s = %s%n", pair[1], pair[0])); - } - } - - /** - * Write all the settings to the file settingsUsed.txt, some of which might have been changed by settings.txt. - * - * @param directory the directory to write to - */ - public void writeSettingsUsed(final Path directory) { - final String[][] settings = currSettings(); - try (final BufferedWriter writer = - Files.newBufferedWriter(directory.resolve(SettingConstants.SETTING_USED_FILENAME))) { - writer.write(PlatformVersion.locateOrDefault().license()); - writer.write(System.lineSeparator()); - writer.write(System.lineSeparator()); - - writer.write( - "The following are all the settings, as modified by settings.txt, but not reflecting any changes " - + "made by config.txt."); - writer.write(System.lineSeparator()); - writer.write(System.lineSeparator()); - for (final String[] pair : settings) { - writer.write(String.format("%15s = %s%n", pair[1], pair[0])); - } - writer.flush(); - } catch (final IOException e) { - logger.error(EXCEPTION.getMarker(), "Error in writing to settingsUsed.txt", e); - } - } - - /** - * If the sdk/data/settings.txt file exists, then load settings from it. If it doesn't exist, keep the existing - * settings. If it exists but a setting is missing, keep the default value for it. If a setting is given multiple - * times, use the last one. If the file contains a setting name that doesn't exist, complain to the command line. - *

- * It is intended that this file will not normally exist. Most settings should be controlled by the defaults set in - * this source file. The settings.txt file is only used for testing and debugging. - */ - public void loadSettings() { - final Path settingsPath = - ConfigurationHolder.getConfigData(PathsConfig.class).getSettingsPath(); - loadSettings(settingsPath.toFile()); - } - - public void loadSettings(final Path path) { - CommonUtils.throwArgNull(path, "path"); - loadSettings(path.toFile()); - } - - public void loadSettings(final File settingsFile) { - CommonUtils.throwArgNull(settingsFile, "settingsFile"); - final Scanner scanner; - if (!Files.exists(settingsFile.toPath())) { - return; // normally, the file won't exist, so the defaults are used. - } - - try { - scanner = new Scanner(settingsFile, StandardCharsets.UTF_8.name()); - } catch (final FileNotFoundException e) { // this should never happen - final Path settingsPath = - ConfigurationHolder.getConfigData(PathsConfig.class).getSettingsPath(); - CommonUtils.tellUserConsole("The file " + settingsPath + " exists, but can't be opened. " + e); - return; - } - - CommonUtils.tellUserConsole("Reading the settings from the file: " + settingsFile.getAbsolutePath()); - - int count = 0; - while (scanner.hasNextLine()) { - final String originalLine = scanner.nextLine(); - String line = originalLine; - final int pos = line.indexOf("#"); - if (pos > -1) { - line = line.substring(0, pos); - } - line = line.trim(); - count++; - if (!line.isEmpty()) { - final String[] pars = splitLine(line); - if (pars.length > 0) { // ignore empty lines - try { - handleSetting(pars); - } catch (final Exception e) { - CommonUtils.tellUserConsole( - "syntax error in settings.txt on line " + count + ": " + originalLine); - scanner.close(); - return; - } - } - } - } - scanner.close(); - } - - /** - * handle a single line from the settings.txt file. The line is split by commas, so none of the individual strings - * or values should have commas in them. The first token on the line is intended to state what setting is being - * changed, and the rest is the value for that setting. - * - * @param pars the parameters on that line, split by commas - * @return true if the line is a valid setting assignment - */ - private boolean handleSetting(final String[] pars) { - String name = pars[0]; - String subName = null; - if (name.contains(".")) { - // if the name contains a dot (.), then we need to set a variable that is inside an object - final String[] split = name.split("\\."); - name = split[0]; - subName = split[1]; - } - final String val = pars.length > 1 ? pars[1].trim() : ""; // the first parameter passed in, or "" if none - boolean good = false; // is name a valid name of a non-final static field in Settings? 
- final Field field = getFieldByName(Settings.class.getDeclaredFields(), name); - if (field != null && !Modifier.isFinal(field.getModifiers())) { - try { - if (subName == null) { - good = setValue(field, this, val); - } else { - final Field subField = getFieldByName(field.getType().getDeclaredFields(), subName); - if (subField != null) { - good = setValue(subField, field.get(this), val); - } - } - } catch (final IllegalArgumentException | IllegalAccessException | SettingsException e) { - logger.error(EXCEPTION.getMarker(), "illegal line in settings.txt: {}, {} {}", pars[0], pars[1], e); - } - } - - return good; - } - - /** - * Finds a field from the array with the given name - * - * @param fields the fields to search in - * @param name the name of the field to look for - * @return the field with the name supplied, or null if such a field cannot be found - */ - private Field getFieldByName(final Field[] fields, final String name) { - for (final Field f : fields) { - if (f.getName().equalsIgnoreCase(name)) { - return f; - } - } - return null; - } - - /** - * Sets the value via reflection, converting the string value into the appropriate type - * - * @param field the field to set - * @param object the object in which to set the field, should be null if the field is static - * @param value the value to set it to - * @return true if the field was set, false otherwise - * @throws IllegalAccessException if this Field object is enforcing Java language access control and the underlying - * field is either inaccessible or final. - */ - private boolean setValue(final Field field, final Object object, final String value) throws IllegalAccessException { - final Class t = field.getType(); - if (t == String.class) { - field.set(object, value); - return true; - } else if (t == char.class) { - field.set(object, value.charAt(0)); - return true; - } else if (t == byte.class) { - field.set(object, Byte.parseByte(value)); - return true; - } else if (t == short.class) { - field.set(object, Short.parseShort(value)); - return true; - } else if (t == int.class) { - field.set(object, Integer.parseInt(value)); - return true; - } else if (t == long.class) { - field.set(object, Long.parseLong(value)); - return true; - } else if (t == boolean.class) { - field.set(object, Utilities.parseBoolean(value)); - return true; - } else if (t == float.class) { - field.set(object, Float.parseFloat(value)); - return true; - } else if (t == double.class) { - field.set(object, Double.parseDouble(value)); - return true; - } else if (t == Duration.class) { - field.set(object, parseDuration(value)); - return true; - } - return false; - } - - /** - * Return all the current settings, as a 2D array of strings, where the first column is the name of the setting, and - * the second column is the value. - * - * @return the current settings - */ - private String[][] currSettings() { - final Field[] fields = Settings.class.getDeclaredFields(); - final List list = new ArrayList<>(); - for (final Field f : fields) { - // every non-setting field should be final, so the following deals with the correct fields - if (!Modifier.isFinal(f.getModifiers())) { - try { - if (SubSetting.class.isAssignableFrom(f.getType())) { - final Field[] subFields = f.getType().getDeclaredFields(); - for (final Field subField : subFields) { - final Object subFieldValue = subField.get(f.get(this)); - list.add(new String[] { - f.getName() + "." + subField.getName(), - subFieldValue == null ? 
"null" : subFieldValue.toString() - }); - } - } else { - list.add(new String[] {f.getName(), f.get(this).toString()}); - } - } catch (final IllegalArgumentException | IllegalAccessException e) { - logger.error(EXCEPTION.getMarker(), "error while reading settings.txt", e); - } - } - } - return list.toArray(new String[0][0]); - } - - public boolean isVerifyEventSigs() { - return verifyEventSigs; - } - - public boolean isShowInternalStats() { - return showInternalStats; - } - - public boolean isVerboseStatistics() { - return verboseStatistics; - } - - public int getDeadlockCheckPeriod() { - return deadlockCheckPeriod; - } - - public boolean isLoadKeysFromPfxFiles() { - return loadKeysFromPfxFiles; - } -} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index 5601fd0a525f..2acbc465d508 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -16,15 +16,17 @@ package com.swirlds.platform; +import static com.swirlds.common.system.InitTrigger.GENESIS; +import static com.swirlds.common.system.InitTrigger.RESTART; +import static com.swirlds.common.system.SoftwareVersion.NO_VERSION; import static com.swirlds.common.threading.interrupt.Uninterruptable.abortAndThrowIfInterrupted; import static com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager; import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.PLATFORM_STATUS; import static com.swirlds.logging.LogMarker.RECONNECT; import static com.swirlds.logging.LogMarker.STARTUP; -import static com.swirlds.platform.state.GenesisStateBuilder.buildGenesisState; +import static com.swirlds.platform.event.tipset.TipsetEventCreationManagerFactory.buildTipsetEventCreationManager; import static com.swirlds.platform.state.address.AddressBookMetrics.registerAddressBookMetrics; -import static com.swirlds.platform.state.signed.ReservedSignedState.createNullReservation; import com.swirlds.base.state.Startable; import com.swirlds.base.time.Time; @@ -37,9 +39,7 @@ import com.swirlds.common.crypto.Hash; import com.swirlds.common.crypto.Signature; import com.swirlds.common.io.utility.RecycleBin; -import com.swirlds.common.merkle.MerkleNode; import com.swirlds.common.merkle.crypto.MerkleCryptoFactory; -import com.swirlds.common.merkle.route.MerkleRouteIterator; import com.swirlds.common.merkle.utility.MerkleTreeVisualizer; import com.swirlds.common.metrics.FunctionGauge; import com.swirlds.common.metrics.Metrics; @@ -70,7 +70,6 @@ import com.swirlds.common.utility.LoggingClearables; import com.swirlds.logging.LogMarker; import com.swirlds.logging.payloads.PlatformStatusPayload; -import com.swirlds.logging.payloads.SavedStateLoadedPayload; import com.swirlds.platform.components.EventCreationRules; import com.swirlds.platform.components.EventCreator; import com.swirlds.platform.components.EventIntake; @@ -89,6 +88,7 @@ import com.swirlds.platform.dispatch.triggers.flow.DiskStateLoadedTrigger; import com.swirlds.platform.dispatch.triggers.flow.ReconnectStateLoadedTrigger; import com.swirlds.platform.event.EventCounter; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.event.EventIntakeTask; import com.swirlds.platform.event.EventUtils; import com.swirlds.platform.event.linking.EventLinker; 
@@ -103,6 +103,7 @@ import com.swirlds.platform.event.preconsensus.PreconsensusEventStreamSequencer; import com.swirlds.platform.event.preconsensus.PreconsensusEventWriter; import com.swirlds.platform.event.preconsensus.SyncPreconsensusEventWriter; +import com.swirlds.platform.event.tipset.TipsetEventCreationManager; import com.swirlds.platform.event.validation.AncientValidator; import com.swirlds.platform.event.validation.EventDeduplication; import com.swirlds.platform.event.validation.EventValidator; @@ -116,7 +117,6 @@ import com.swirlds.platform.gossip.Gossip; import com.swirlds.platform.gossip.GossipFactory; import com.swirlds.platform.gossip.chatter.config.ChatterConfig; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import com.swirlds.platform.gossip.shadowgraph.ShadowGraph; import com.swirlds.platform.gossip.shadowgraph.ShadowGraphEventObserver; import com.swirlds.platform.gui.GuiPlatformAccessor; @@ -141,8 +141,6 @@ import com.swirlds.platform.state.signed.SourceOfSignedState; import com.swirlds.platform.stats.StatConstructor; import com.swirlds.platform.system.Shutdown; -import com.swirlds.platform.system.SystemExitCode; -import com.swirlds.platform.system.SystemExitUtils; import com.swirlds.platform.threading.PauseAndLoad; import com.swirlds.platform.util.PlatformComponents; import edu.umd.cs.findbugs.annotations.NonNull; @@ -151,12 +149,13 @@ import java.io.UncheckedIOException; import java.time.Instant; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; -import java.util.function.Supplier; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -200,23 +199,11 @@ public class SwirldsPlatform implements Platform, Startable { /** the object that contains all key pairs and CSPRNG state for this member */ private final Crypto crypto; - /** - * True if this node started from genesis. - */ - private final boolean startedFromGenesis; - /** - * If a state was loaded from disk, this will have the round of that state. - */ - private final long diskStateRound; /** * If a state was loaded from disk, this is the minimum generation non-ancient for that round. If starting from a * genesis state, this is 0. */ private final long initialMinimumGenerationNonAncient; - /** - * If a state was loaded from disk, this will have the hash of that state. - */ - private final Hash diskStateHash; private final StateManagementComponent stateManagementComponent; private final EventTaskDispatcher eventTaskDispatcher; @@ -285,6 +272,11 @@ public class SwirldsPlatform implements Platform, Startable { */ private final RecycleBin recycleBin; + /** + * Creates new events using the tipset algorithm. + */ + private final TipsetEventCreationManager tipsetEventCreator; + /** * the browser gives the Platform what app to run. There can be multiple Platforms on one computer. 
* @@ -292,27 +284,22 @@ public class SwirldsPlatform implements Platform, Startable { * @param crypto an object holding all the public/private key pairs and the CSPRNG state for this * member * @param recycleBin used to delete files that may be useful for later debugging - * @param initialAddressBook the address book listing all members in the community - * @param id the ID number for this member (if this computer has multiple members in one - * swirld) + * @param id the ID for this node * @param mainClassName the name of the app class inheriting from SwirldMain * @param swirldName the name of the swirld being run * @param appVersion the current version of the running application - * @param genesisStateBuilder used to construct a genesis state if no suitable state from disk can be found - * @param loadedSignedState used to initialize the loaded state + * @param initialState the initial state of the platform * @param emergencyRecoveryManager used in emergency recovery. */ SwirldsPlatform( @NonNull final PlatformContext platformContext, @NonNull final Crypto crypto, @NonNull final RecycleBin recycleBin, - @NonNull final AddressBook initialAddressBook, @NonNull final NodeId id, @NonNull final String mainClassName, @NonNull final String swirldName, @NonNull final SoftwareVersion appVersion, - @NonNull final Supplier genesisStateBuilder, - @NonNull final ReservedSignedState loadedSignedState, + @NonNull final SignedState initialState, @NonNull final EmergencyRecoveryManager emergencyRecoveryManager) { this.platformContext = Objects.requireNonNull(platformContext, "platformContext"); @@ -340,7 +327,7 @@ public class SwirldsPlatform implements Platform, Startable { this.appVersion = appVersion; this.selfId = id; - this.initialAddressBook = initialAddressBook; + this.initialAddressBook = initialState.getAddressBook(); this.eventMapper = new EventMapper(platformContext.getMetrics(), selfId); @@ -406,12 +393,11 @@ public class SwirldsPlatform implements Platform, Startable { final Address address = getSelfAddress(); final String eventStreamManagerName; - if (address.getMemo() != null && !address.getMemo().isEmpty()) { + if (!address.getMemo().isEmpty()) { eventStreamManagerName = address.getMemo(); } else { eventStreamManagerName = String.valueOf(selfId); } - logger.info(STARTUP.getMarker(), "initialize eventStreamManager"); final EventStreamManager eventStreamManager = new EventStreamManager<>( platformContext, @@ -425,217 +411,210 @@ public class SwirldsPlatform implements Platform, Startable { eventConfig.eventStreamQueueCapacity(), this::isLastEventBeforeRestart); - if (loadedSignedState.isNotNull()) { - diskStateHash = loadedSignedState.get().getState().getHash(); - diskStateRound = loadedSignedState.get().getRound(); - initialMinimumGenerationNonAncient = loadedSignedState - .get() - .getState() - .getPlatformState() - .getPlatformData() - .getMinimumGenerationNonAncient(); - startedFromGenesis = false; - } else { - diskStateHash = null; - diskStateRound = -1; - initialMinimumGenerationNonAncient = 0; - startedFromGenesis = true; - } - - final LoadedState loadedState = initializeLoadedStateFromSignedState(loadedSignedState, stateConfig); - final PreConsensusEventHandler preConsensusEventHandler; - try (loadedState.signedStateFromDisk) { - final SignedState signedStateFromDisk = loadedState.signedStateFromDisk.getNullable(); - - stateHashSignQueue = PlatformConstructor.stateHashSignQueue( - threadManager, selfId, stateManagementComponent::newSignedStateFromTransactions, metrics); - 
stateHashSignQueue.start(); - - final State stateToLoad; - if (signedStateFromDisk != null) { - logger.debug(STARTUP.getMarker(), () -> new SavedStateLoadedPayload( - signedStateFromDisk.getRound(), - signedStateFromDisk.getConsensusTimestamp(), - startUpEventFrozenManager.getStartUpEventFrozenEndTime()) - .toString()); - - stateToLoad = loadedState.initialState; - - } else { - stateToLoad = buildGenesisState(this, initialAddressBook, appVersion, genesisStateBuilder); - - // if we are not starting from a saved state, don't freeze on startup - startUpEventFrozenManager.setStartUpEventFrozenEndTime(null); - } - - if (stateToLoad == null) { - // this should be impossible - throw new IllegalStateException("stateToLoad is null"); - } - final TransactionConfig transactionConfig = - platformContext.getConfiguration().getConfigData(TransactionConfig.class); - swirldStateManager = PlatformConstructor.swirldStateManager( - platformContext, - initialAddressBook, - selfId, - preConsensusSystemTransactionManager, - postConsensusSystemTransactionManager, - metrics, - transactionConfig, - freezeManager::isFreezeStarted, - stateToLoad); - - // SwirldStateManager will get a copy of the state loaded, that copy will become stateCons. - // The original state will be saved in the SignedStateMgr and will be deleted when it becomes old - - final ThreadConfig threadConfig = platformContext.getConfiguration().getConfigData(ThreadConfig.class); - preConsensusEventHandler = components.add(new PreConsensusEventHandler( - metrics, threadManager, selfId, swirldStateManager, consensusMetrics, threadConfig)); - consensusRoundHandler = components.add(PlatformConstructor.consensusHandler( - platformContext, - threadManager, - selfId, - swirldStateManager, - new ConsensusHandlingMetrics(metrics, time), - eventStreamManager, - stateHashSignQueue, - preconsensusEventWriter::waitUntilDurable, - freezeManager::freezeStarted, - stateManagementComponent::roundAppliedToState, - appVersion)); - - if (signedStateFromDisk != null) { - consensusRoundHandler.loadDataFromSignedState(signedStateFromDisk, false); - } - - final AddedEventMetrics addedEventMetrics = new AddedEventMetrics(this.selfId, metrics); - final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); - - final EventObserverDispatcher eventObserverDispatcher = new EventObserverDispatcher( - new ShadowGraphEventObserver(shadowGraph), - consensusRoundHandler, - preConsensusEventHandler, - eventMapper, - addedEventMetrics, - eventIntakeMetrics, - (PreConsensusEventObserver) event -> { - sequencer.assignStreamSequenceNumber(event); - abortAndThrowIfInterrupted( - preconsensusEventWriter::writeEvent, - event, - "Interrupted while attempting to enqueue preconsensus event for writing"); - }, - (ConsensusRoundObserver) round -> { - abortAndThrowIfInterrupted( - preconsensusEventWriter::setMinimumGenerationNonAncient, - round.getGenerations().getMinGenerationNonAncient(), - "Interrupted while attempting to enqueue change in minimum generation non-ancient"); - - abortAndThrowIfInterrupted( - preconsensusEventWriter::requestFlush, - "Interrupted while requesting preconsensus event flush"); - }); - - final List> isDuplicateChecks = new ArrayList<>(); - isDuplicateChecks.add(d -> shadowGraph.isHashInGraph(d.getHash())); + initializeState(initialState); - eventLinker = buildEventLinker(isDuplicateChecks); + final TransactionConfig transactionConfig = + platformContext.getConfiguration().getConfigData(TransactionConfig.class); - final IntakeCycleStats 
intakeCycleStats = new IntakeCycleStats(time, metrics); - - final EventIntake eventIntake = new EventIntake( - selfId, - eventLinker, - consensusRef::get, - initialAddressBook, - eventObserverDispatcher, - intakeCycleStats, - shadowGraph); - - final EventCreator eventCreator = buildEventCreator(eventIntake); - final Settings settings = Settings.getInstance(); - - final List validators = new ArrayList<>(); - // it is very important to discard ancient events, otherwise the deduplication will not work, since it - // doesn't track ancient events - validators.add(new AncientValidator(consensusRef::get)); - validators.add(new EventDeduplication(isDuplicateChecks, eventIntakeMetrics)); - validators.add(StaticValidators::isParentDataValid); - validators.add(new TransactionSizeValidator(transactionConfig.maxTransactionBytesPerEvent())); - if (settings.isVerifyEventSigs()) { - validators.add(new SignatureValidator(initialAddressBook)); - } - final GossipEventValidators eventValidators = new GossipEventValidators(validators); - - /* validates events received from gossip */ - final EventValidator eventValidator = new EventValidator(eventValidators, eventIntake::addUnlinkedEvent); - - eventTaskDispatcher = new EventTaskDispatcher( - time, - eventValidator, - eventCreator, - eventIntake::addUnlinkedEvent, - eventIntakeMetrics, - intakeCycleStats); - - intakeQueue = components.add(new QueueThreadConfiguration(threadManager) - .setNodeId(selfId) - .setComponent(PLATFORM_THREAD_POOL_NAME) - .setThreadName("event-intake") - // There is a circular dependency between the intake queue and gossip, - // which the handler lambda sidesteps (since the lambda is not invoked - // until after all things have been constructed). - .setHandler(e -> getGossip().getEventIntakeLambda().accept(e)) - .setCapacity(eventConfig.eventIntakeQueueSize()) - .setLogAfterPauseDuration(threadConfig.logStackTracePauseDuration()) - .setMetricsConfiguration(new QueueThreadMetricsConfiguration(metrics) - .enableMaxSizeMetric() - .enableBusyTimeMetric()) - .build()); - - transactionSubmitter = new SwirldTransactionSubmitter( - currentPlatformStatus::get, - transactionConfig, - swirldStateManager::submitTransaction, - new TransactionMetrics(metrics)); + // This object makes a copy of the state. After this point, initialState becomes immutable. 
+ swirldStateManager = PlatformConstructor.swirldStateManager( + platformContext, + initialAddressBook, + selfId, + preConsensusSystemTransactionManager, + postConsensusSystemTransactionManager, + metrics, + transactionConfig, + freezeManager::isFreezeStarted, + initialState.getState(), + appVersion); + + stateHashSignQueue = components.add(PlatformConstructor.stateHashSignQueue( + threadManager, selfId, stateManagementComponent::newSignedStateFromTransactions, metrics)); + + final ThreadConfig threadConfig = platformContext.getConfiguration().getConfigData(ThreadConfig.class); + final PreConsensusEventHandler preConsensusEventHandler = components.add(new PreConsensusEventHandler( + metrics, threadManager, selfId, swirldStateManager, consensusMetrics, threadConfig)); + consensusRoundHandler = components.add(PlatformConstructor.consensusHandler( + platformContext, + threadManager, + selfId, + swirldStateManager, + new ConsensusHandlingMetrics(metrics, time), + eventStreamManager, + stateHashSignQueue, + preconsensusEventWriter::waitUntilDurable, + freezeManager::freezeStarted, + stateManagementComponent::roundAppliedToState, + appVersion)); + + final AddedEventMetrics addedEventMetrics = new AddedEventMetrics(this.selfId, metrics); + final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); + + final EventObserverDispatcher eventObserverDispatcher = new EventObserverDispatcher( + new ShadowGraphEventObserver(shadowGraph), + consensusRoundHandler, + preConsensusEventHandler, + eventMapper, + addedEventMetrics, + eventIntakeMetrics, + (PreConsensusEventObserver) event -> { + sequencer.assignStreamSequenceNumber(event); + abortAndThrowIfInterrupted( + preconsensusEventWriter::writeEvent, + event, + "Interrupted while attempting to enqueue preconsensus event for writing"); + }, + (ConsensusRoundObserver) round -> { + abortAndThrowIfInterrupted( + preconsensusEventWriter::setMinimumGenerationNonAncient, + round.getGenerations().getMinGenerationNonAncient(), + "Interrupted while attempting to enqueue change in minimum generation non-ancient"); + + abortAndThrowIfInterrupted( + preconsensusEventWriter::requestFlush, + "Interrupted while requesting preconsensus event flush"); + }); + + final List> isDuplicateChecks = new ArrayList<>(); + isDuplicateChecks.add(d -> shadowGraph.isHashInGraph(d.getHash())); + + eventLinker = buildEventLinker(isDuplicateChecks); + + final IntakeCycleStats intakeCycleStats = new IntakeCycleStats(time, metrics); + + final EventIntake eventIntake = new EventIntake( + selfId, + eventLinker, + consensusRef::get, + initialAddressBook, + eventObserverDispatcher, + intakeCycleStats, + shadowGraph); + + final EventCreator eventCreator = buildEventCreator(eventIntake); + final BasicConfig basicConfig = platformContext.getConfiguration().getConfigData(BasicConfig.class); + + final List validators = new ArrayList<>(); + // it is very important to discard ancient events, otherwise the deduplication will not work, since it + // doesn't track ancient events + validators.add(new AncientValidator(consensusRef::get)); + validators.add(new EventDeduplication(isDuplicateChecks, eventIntakeMetrics)); + validators.add(StaticValidators::isParentDataValid); + validators.add(new TransactionSizeValidator(transactionConfig.maxTransactionBytesPerEvent())); + if (basicConfig.verifyEventSigs()) { + validators.add(new SignatureValidator(initialAddressBook)); + } + final GossipEventValidators eventValidators = new GossipEventValidators(validators); + + /* validates 
events received from gossip */ + final EventValidator eventValidator = new EventValidator(eventValidators, eventIntake::addUnlinkedEvent); + + eventTaskDispatcher = new EventTaskDispatcher( + time, + eventValidator, + eventCreator, + eventIntake::addUnlinkedEvent, + eventIntakeMetrics, + intakeCycleStats); + + intakeQueue = components.add(new QueueThreadConfiguration(threadManager) + .setNodeId(selfId) + .setComponent(PLATFORM_THREAD_POOL_NAME) + .setThreadName("event-intake") + // There is a circular dependency between the intake queue and gossip, + // which the handler lambda sidesteps (since the lambda is not invoked + // until after all things have been constructed). + .setHandler(e -> getGossip().getEventIntakeLambda().accept(e)) + .setCapacity(eventConfig.eventIntakeQueueSize()) + .setLogAfterPauseDuration(threadConfig.logStackTracePauseDuration()) + .setMetricsConfiguration(new QueueThreadMetricsConfiguration(metrics) + .enableMaxSizeMetric() + .enableBusyTimeMetric()) + .build()); + + tipsetEventCreator = buildTipsetEventCreationManager( + platformContext, + threadManager, + time, + this, + initialAddressBook, + selfId, + appVersion, + swirldStateManager.getTransactionPool(), + intakeQueue, + eventObserverDispatcher, + currentPlatformStatus::get, + startUpEventFrozenManager); + + transactionSubmitter = new SwirldTransactionSubmitter( + currentPlatformStatus::get, + transactionConfig, + swirldStateManager::submitTransaction, + new TransactionMetrics(metrics)); + + final boolean startedFromGenesis = initialState.isGenesisState(); + + gossip = GossipFactory.buildGossip( + platformContext, + threadManager, + time, + crypto, + notificationEngine, + initialAddressBook, + selfId, + appVersion, + shadowGraph, + emergencyRecoveryManager, + consensusRef, + intakeQueue, + freezeManager, + startUpEventFrozenManager, + swirldStateManager, + startedFromGenesis, + stateManagementComponent, + eventTaskDispatcher::dispatchTask, + eventObserverDispatcher, + eventMapper, + eventIntakeMetrics, + eventLinker, + this::checkPlatformStatus, + this::loadReconnectState, + this::clearAllPipelines); + + if (startedFromGenesis) { + initialMinimumGenerationNonAncient = 0; - gossip = GossipFactory.buildGossip( - platformContext, - threadManager, - time, - crypto, - notificationEngine, - initialAddressBook, - selfId, - appVersion, - shadowGraph, - emergencyRecoveryManager, - consensusRef, - intakeQueue, - freezeManager, - startUpEventFrozenManager, - swirldStateManager, - startedFromGenesis, - stateManagementComponent, - eventTaskDispatcher::dispatchTask, - eventObserverDispatcher, - eventMapper, - eventIntakeMetrics, - eventLinker, - this::checkPlatformStatus, - this::loadReconnectState, - this::clearAllPipelines); - - if (signedStateFromDisk != null) { - loadIntoConsensusAndEventMapper(signedStateFromDisk); - eventLinker.loadFromSignedState(signedStateFromDisk); - } else { - consensusRef.set(new ConsensusImpl( - platformContext.getConfiguration().getConfigData(ConsensusConfig.class), - consensusMetrics, - consensusRoundHandler::addMinGenInfo, - getAddressBook())); - } + consensusRef.set(new ConsensusImpl( + platformContext.getConfiguration().getConfigData(ConsensusConfig.class), + consensusMetrics, + consensusRoundHandler::addMinGenInfo, + getAddressBook())); + } else { + initialMinimumGenerationNonAncient = + initialState.getState().getPlatformState().getPlatformData().getMinimumGenerationNonAncient(); + + stateManagementComponent.stateToLoad(initialState, SourceOfSignedState.DISK); + 
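The queue-handler comment above calls out a construction-order cycle: gossip needs the intake queue, and the intake queue's handler needs gossip. The lambda sidesteps it because it is not evaluated until after everything has been constructed. A minimal, self-contained sketch of that wiring trick follows; the class names (IntakeQueue, Gossip) are hypothetical stand-ins, not platform types.

    import java.util.function.Consumer;
    import java.util.function.Supplier;

    public final class CircularWiringSketch {

        /** Holds events and forwards each one to a handler that is resolved lazily. */
        static final class IntakeQueue {
            private final Consumer<String> handler;

            IntakeQueue(final Consumer<String> handler) {
                this.handler = handler;
            }

            void put(final String event) {
                handler.accept(event);
            }
        }

        /** Receives events from peers and places them on the intake queue. */
        static final class Gossip {
            private final IntakeQueue intakeQueue;

            Gossip(final IntakeQueue intakeQueue) {
                this.intakeQueue = intakeQueue;
            }

            void onEventReceived(final String event) {
                intakeQueue.put(event);
            }

            void processIntakeEvent(final String event) {
                System.out.println("processing " + event);
            }
        }

        public static void main(final String[] args) {
            final Gossip[] gossipRef = new Gossip[1];
            final Supplier<Gossip> getGossip = () -> gossipRef[0];

            // The queue only captures the supplier; gossip does not need to exist yet,
            // because the lambda is not invoked until an event is actually handled.
            final IntakeQueue intakeQueue = new IntakeQueue(e -> getGossip.get().processIntakeEvent(e));

            // Gossip is constructed second, with a direct reference to the queue.
            gossipRef[0] = new Gossip(intakeQueue);

            gossipRef[0].onEventReceived("event-1");
        }
    }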
consensusRoundHandler.loadDataFromSignedState(initialState, false); + + loadStateIntoConsensusAndEventMapper(initialState); + loadStateIntoEventCreator(initialState); + eventLinker.loadFromSignedState(initialState); + + // We don't want to invoke these callbacks until after we are starting up. + components.add((Startable) () -> { + final long round = initialState.getRound(); + final Hash hash = initialState.getState().getHash(); + + // If we loaded from disk then call the appropriate dispatch. + // It is important that this is sent after the ConsensusHashManager + // is initialized. + diskStateLoadedDispatcher.dispatch(round, hash); + + // Let the app know that a state was loaded. + notificationEngine.dispatch( + StateLoadedFromDiskCompleteListener.class, new StateLoadedFromDiskNotification()); + }); } clearAllPipelines = new LoggingClearables( @@ -687,92 +666,28 @@ private boolean createSystemTransaction( } /** - * A container for the initial state. - * - * @param signedStateFromDisk the initial signed state loaded from disk - * @param initialState the initial {@link State} object. This is a fast copy of the state loaded from disk - */ - private record LoadedState(@NonNull ReservedSignedState signedStateFromDisk, @Nullable State initialState) {} - - /** - * Update the address book with the current address book read from config.txt. Eventually we will not do this, and - * only transactions will be capable of modifying the address book. + * Initialize the state. * - * @param signedState the state that was loaded from disk - * @param addressBook the address book specified in config.txt + * @param signedState the state to initialize */ - private static void updateLoadedStateAddressBook(final SignedState signedState, final AddressBook addressBook) { - final State state = signedState.getState(); - - // Update the address book with the current address book read from config.txt. - // Eventually we will not do this, and only transactions will be capable of - // modifying the address book. - state.getPlatformState().setAddressBook(addressBook.copy()); - - // Invalidate a path down to the new address book - new MerkleRouteIterator(state, state.getPlatformState().getAddressBook().getRoute()) - .forEachRemaining(MerkleNode::invalidateHash); + private void initializeState(@NonNull final SignedState signedState) { - // We should only have to rehash a few nodes, so simpler to use the synchronous algorithm. - MerkleCryptoFactory.getInstance().digestTreeSync(state); + final SoftwareVersion previousSoftwareVersion; + final InitTrigger trigger; - // If our hash changes as a result of the new address book then our old signatures may become invalid. - signedState.pruneInvalidSignatures(); - } - - /** - * Create the LoadedState from the SignedState loaded from disk, if it is present. - * - * @param signedStateFromDisk the SignedState loaded from disk. 
- * @param stateConfig the state configuration - * @return the LoadedState - */ - @NonNull - private LoadedState initializeLoadedStateFromSignedState( - @NonNull final ReservedSignedState signedStateFromDisk, @NonNull final StateConfig stateConfig) { - try (signedStateFromDisk) { - if (signedStateFromDisk.isNotNull()) { - updateLoadedStateAddressBook(signedStateFromDisk.get(), initialAddressBook); - final State initialState = loadSavedState(signedStateFromDisk.get(), stateConfig); - return new LoadedState( - signedStateFromDisk.getAndReserve("SwirldsPlatform.initializeLoadedStateFromSignedState()"), - initialState); - } - } catch (final Exception e) { - logger.error(EXCEPTION.getMarker(), "Saved state not loaded:", e); - // if requireStateLoad is on, we exit. if not, we just log it - if (stateConfig.requireStateLoad()) { - SystemExitUtils.exitSystem(SystemExitCode.SAVED_STATE_NOT_LOADED); - } + if (signedState.isGenesisState()) { + previousSoftwareVersion = NO_VERSION; + trigger = GENESIS; + } else { + previousSoftwareVersion = + signedState.getState().getPlatformState().getPlatformData().getCreationSoftwareVersion(); + trigger = RESTART; } - return new LoadedState(createNullReservation(), null); - } - - private State loadSavedState( - @NonNull final SignedState signedStateFromDisk, @NonNull final StateConfig stateConfig) { - logger.info( - STARTUP.getMarker(), - "Information for state loaded from disk:\n{}\n{}", - () -> signedStateFromDisk.getState().getPlatformState().getInfoString(), - () -> new MerkleTreeVisualizer(signedStateFromDisk.getState()) - .setDepth(stateConfig.debugHashDepth()) - .render()); - - // The previous version of the software that was run. Null if this is the first time running, or if the previous - // version ran before the concept of application software versioning was introduced. - final SoftwareVersion previousSoftwareVersion = signedStateFromDisk - .getState() - .getPlatformState() - .getPlatformData() - .getCreationSoftwareVersion(); - final State initialState = signedStateFromDisk.getState().copy(); - initialState.getPlatformState().getPlatformData().setCreationSoftwareVersion(appVersion); + final State initialState = signedState.getState(); final Hash initialHash = initialState.getSwirldState().getHash(); - initialState - .getSwirldState() - .init(this, initialState.getSwirldDualState(), InitTrigger.RESTART, previousSoftwareVersion); - initialState.markAsInitialized(); + + initialState.getSwirldState().init(this, initialState.getSwirldDualState(), trigger, previousSoftwareVersion); final Hash currentHash = initialState.getSwirldState().getHash(); @@ -794,9 +709,51 @@ private State loadSavedState( "interrupted while attempting to hash the state"); } - stateManagementComponent.stateToLoad(signedStateFromDisk, SourceOfSignedState.DISK); + final StateConfig stateConfig = platformContext.getConfiguration().getConfigData(StateConfig.class); + logger.info( + STARTUP.getMarker(), + "The platform is using the following initial state:\n{}\n{}", + signedState.getState().getPlatformState().getInfoString(), + new MerkleTreeVisualizer(signedState.getState()) + .setDepth(stateConfig.debugHashDepth()) + .render()); + } + + /** + * Load the signed state (either at reboot or reconnect) into the event creator. 
+ * + * @param signedState the signed state to load from + */ + private void loadStateIntoEventCreator(@NonNull final SignedState signedState) { + Objects.requireNonNull(signedState); + + if (tipsetEventCreator == null) { + // New event creation logic is disabled via settings + return; + } + + try { + tipsetEventCreator.setMinimumGenerationNonAncient( + signedState.getState().getPlatformState().getPlatformData().getMinimumGenerationNonAncient()); + + // The event creator may not be started yet. To avoid filling up queues, only register + // the latest event from each creator. These are the only ones the event creator cares about. - return initialState; + final Map latestEvents = new HashMap<>(); + + for (final EventImpl event : + signedState.getState().getPlatformState().getPlatformData().getEvents()) { + latestEvents.put(event.getCreatorId(), event); + } + + for (final EventImpl event : latestEvents.values()) { + tipsetEventCreator.registerEvent(event); + } + + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException("interrupted while loading state into event creator", e); + } } /** @@ -804,7 +761,9 @@ private State loadSavedState( * * @param signedState the state to get the data from */ - void loadIntoConsensusAndEventMapper(final SignedState signedState) { + private void loadStateIntoConsensusAndEventMapper(@NonNull final SignedState signedState) { + Objects.requireNonNull(signedState); + consensusRef.set(new ConsensusImpl( platformContext.getConfiguration().getConfigData(ConsensusConfig.class), consensusMetrics, @@ -838,7 +797,7 @@ void loadIntoConsensusAndEventMapper(final SignedState signedState) { * * @param signedState the signed state that was received from the sender */ - void loadReconnectState(final SignedState signedState) { + private void loadReconnectState(final SignedState signedState) { // the state was received, so now we load its data into different objects logger.info(LogMarker.STATE_HASH.getMarker(), "RECONNECT: loadReconnectState: reloading state"); logger.debug(RECONNECT.getMarker(), "`loadReconnectState` : reloading state"); @@ -867,13 +826,13 @@ void loadReconnectState(final SignedState signedState) { + reconnectHash + ", new hash is " + signedState.getState().getHash()); } - signedState.getState().markAsInitialized(); swirldStateManager.loadFromSignedState(signedState); stateManagementComponent.stateToLoad(signedState, SourceOfSignedState.RECONNECT); - loadIntoConsensusAndEventMapper(signedState); + loadStateIntoConsensusAndEventMapper(signedState); + loadStateIntoEventCreator(signedState); // eventLinker is not thread safe, which is not a problem regularly because it is only used by a single // thread. after a reconnect, it needs to load the minimum generation from a state on a different thread, // so the intake thread is paused before the data is loaded and unpaused after. this ensures that the @@ -935,6 +894,7 @@ private EventCreator buildEventCreator(@NonNull final EventIntake eventIntake) { return null; } else { return new EventCreator( + platformContext, this.appVersion, selfId, PlatformConstructor.platformSigner(crypto.getKeysAndCerts()), @@ -1032,10 +992,15 @@ private PreconsensusEventWriter buildPreconsensusEventWriter( public void start() { components.start(); - sendStartupNotifications(); - metrics.start(); + if (tipsetEventCreator != null) { + // The event creator is intentionally started before replaying the preconsensus event stream. 
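loadStateIntoEventCreator above hands the tipset event creator only the latest event from each creator (the map is keyed by creator, roughly a Map<NodeId, EventImpl>), so its intake queue cannot be flooded before it starts. Below is a small self-contained sketch of that reduction; StateEvent is a hypothetical stand-in for EventImpl, and unlike the platform loop (which relies on the ordering of the events in the state) it compares generations explicitly.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public final class LatestEventPerCreatorSketch {

        /** Hypothetical, simplified stand-in for an event stored in the signed state. */
        record StateEvent(long creatorId, long generation) {}

        public static void main(final String[] args) {
            final List<StateEvent> eventsFromState = List.of(
                    new StateEvent(0, 10),
                    new StateEvent(1, 11),
                    new StateEvent(0, 12), // newer event from creator 0
                    new StateEvent(2, 9));

            // Keep only the newest event seen from each creator.
            final Map<Long, StateEvent> latestEvents = new HashMap<>();
            for (final StateEvent event : eventsFromState) {
                latestEvents.merge(
                        event.creatorId(),
                        event,
                        (previous, candidate) ->
                                previous.generation() >= candidate.generation() ? previous : candidate);
            }

            // One entry per creator: generation 12 for creator 0, 11 for creator 1, 9 for creator 2.
            latestEvents.values().forEach(System.out::println);
        }
    }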
+ // This prevents the event creator's intake queue from filling up and blocking. Note that + // this component won't actually create events until the platform has the appropriate status. + tipsetEventCreator.start(); + } + replayPreconsensusEvents(); configureStartupEventFreeze(); gossip.start(); @@ -1045,21 +1010,6 @@ public void start() { checkPlatformStatus(); } - /** - * Send notifications that can only be sent after components have been started. - */ - private void sendStartupNotifications() { - if (!startedFromGenesis) { - // If we loaded from disk then call the appropriate dispatch. This dispatch - // must wait until after components have been started. - diskStateLoadedDispatcher.dispatch(diskStateRound, diskStateHash); - - // Let the app know that a state was loaded. - notificationEngine.dispatch( - StateLoadedFromDiskCompleteListener.class, new StateLoadedFromDiskNotification()); - } - } - /** * If configured to do so, replay preconsensus events. */ @@ -1126,6 +1076,7 @@ private void checkPlatformStatus() { /** * Change the current platform status. + * * @param newStatus the new platform status */ private void setPlatformStatus(@NonNull final PlatformStatus newStatus) { diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamRecoverCommand.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamRecoverCommand.java index 581faf62447f..93fd07b3f87a 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamRecoverCommand.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamRecoverCommand.java @@ -21,9 +21,20 @@ import com.swirlds.cli.commands.EventStreamCommand; import com.swirlds.cli.utility.AbstractCommand; import com.swirlds.cli.utility.SubcommandOf; +import com.swirlds.common.config.ConfigUtils; +import com.swirlds.common.config.singleton.ConfigurationHolder; +import com.swirlds.common.config.sources.LegacyFileConfigSource; +import com.swirlds.common.context.DefaultPlatformContext; +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.crypto.CryptographyHolder; +import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.system.NodeId; +import com.swirlds.config.api.Configuration; +import com.swirlds.config.api.ConfigurationBuilder; +import java.io.IOException; import java.nio.file.Path; import java.util.List; +import java.util.Set; import picocli.CommandLine; @CommandLine.Command( @@ -115,9 +126,36 @@ private void setLoadSigningKeys(final boolean loadSigningKeys) { this.loadSigningKeys = loadSigningKeys; } + /** + * Build a configuration object from the provided configuration paths. 
+ * + * @return the configuration object + * @throws IOException if there is an error reading the configuration files + */ + private Configuration buildConfiguration() throws IOException { + final ConfigurationBuilder configurationBuilder = ConfigurationBuilder.create(); + ConfigUtils.scanAndRegisterAllConfigTypes(configurationBuilder, Set.of("com.swirlds")); + + for (final Path configurationPath : configurationPaths) { + System.out.printf("Loading configuration from %s%n", configurationPath); + configurationBuilder.withSource(new LegacyFileConfigSource(configurationPath)); + } + + final Configuration configuration = configurationBuilder.build(); + ConfigurationHolder.getInstance().setConfiguration(configuration); + + return configuration; + } + @Override public Integer call() throws Exception { + final Configuration configuration = buildConfiguration(); + + final PlatformContext platformContext = + new DefaultPlatformContext(configuration, new NoOpMetrics(), CryptographyHolder.get()); + recoverState( + platformContext, bootstrapSignedState, configurationPaths, eventStreamDirectory, diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamSignCommand.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamSignCommand.java index aa9819b3a5b7..5a3f3b5eddc3 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamSignCommand.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/EventStreamSignCommand.java @@ -55,7 +55,6 @@ public boolean isFileSupported(@NonNull final Path path) { @Override public Integer call() { EventStreamSigningUtils.initializeSystem(); - return super.call(); } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventCreator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventCreator.java index 684f3b5b5437..7abebafaa2a9 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventCreator.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventCreator.java @@ -18,6 +18,7 @@ import static com.swirlds.logging.LogMarker.CREATE_EVENT; +import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.CryptographyHolder; import com.swirlds.common.stream.Signer; import com.swirlds.common.system.EventCreationRuleResponse; @@ -31,6 +32,7 @@ import com.swirlds.platform.event.EventUtils; import com.swirlds.platform.event.SelfEventStorage; import com.swirlds.platform.event.creation.AncientParentsRule; +import com.swirlds.platform.event.tipset.EventCreationConfig; import com.swirlds.platform.internal.EventImpl; import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; @@ -79,33 +81,30 @@ public class EventCreator { /** This object is used for checking whether this node should create an event or not */ private final EventCreationRules eventCreationRules; + /** + * If true, event creation is being handled by the tipset algorithm and this class should not create any events. + */ + private final boolean disabled; + /** * Construct a new EventCreator. 
* - * @param softwareVersion - * the software version of the node - * @param selfId - * the ID of this node - * @param signer - * responsible for signing new events - * @param graphGenerationsSupplier - * supplies the key generation number from the hashgraph - * @param transactionSupplier - * this method supplies transactions that should be inserted into newly created events - * @param newEventHandler - * this method is passed all newly created events - * @param selfEventStorage - * stores the most recent event created by me - * @param eventMapper - * the object that tracks the most recent events from each node - * @param transactionPool - * the TransactionPool - * @param inFreeze - * indicates if the system is currently in a freeze - * @param eventCreationRules - * the object used for checking if we should create an event or not + * @param platformContext the platform context for this node + * @param softwareVersion the software version of the node + * @param selfId the ID of this node + * @param signer responsible for signing new events + * @param graphGenerationsSupplier supplies the key generation number from the hashgraph + * @param transactionSupplier this method supplies transactions that should be inserted into newly created + * events + * @param newEventHandler this method is passed all newly created events + * @param selfEventStorage stores the most recent event created by me + * @param eventMapper the object that tracks the most recent events from each node + * @param transactionPool the TransactionPool + * @param inFreeze indicates if the system is currently in a freeze + * @param eventCreationRules the object used for checking if we should create an event or not */ public EventCreator( + @NonNull final PlatformContext platformContext, @NonNull final SoftwareVersion softwareVersion, @NonNull final NodeId selfId, @NonNull final Signer signer, @@ -129,15 +128,23 @@ public EventCreator( this.transactionPool = Objects.requireNonNull(transactionPool, "the transaction pool is null"); this.inFreeze = Objects.requireNonNull(inFreeze, "the in freeze is null"); this.eventCreationRules = Objects.requireNonNull(eventCreationRules, "the event creation rules is null"); + this.disabled = platformContext + .getConfiguration() + .getConfigData(EventCreationConfig.class) + .useTipsetAlgorithm(); } /** * Create a new event and push it into the gossip/consensus pipeline. * - * @param otherId - * the node ID that will supply the other parent for this event + * @param otherId the node ID that will supply the other parent for this event */ public boolean createEvent(final NodeId otherId) { + + if (disabled) { + return false; + } + if (eventCreationRules.shouldCreateEvent() == EventCreationRuleResponse.DONT_CREATE) { return false; } @@ -201,11 +208,10 @@ protected EventImpl buildEvent(final EventImpl selfParent, final EventImpl other } /** - * Check if the most recent event from the given node has been used as an other parent by an - * event created by the current node. + * Check if the most recent event from the given node has been used as an other parent by an event created by the + * current node. 
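Both the legacy EventCreator above and the ChatterEventCreator later in this diff now read EventCreationConfig from the PlatformContext and stand down when useTipsetAlgorithm is enabled. A rough sketch of inspecting that flag outside the platform, using only calls that appear elsewhere in this diff (ConfigurationBuilder, ConfigUtils.scanAndRegisterAllConfigTypes, getConfigData); it assumes the swirlds config and platform modules are on the classpath.

    import com.swirlds.common.config.ConfigUtils;
    import com.swirlds.config.api.Configuration;
    import com.swirlds.config.api.ConfigurationBuilder;
    import com.swirlds.platform.event.tipset.EventCreationConfig;
    import java.util.Set;

    public final class EventCreationGateSketch {
        public static void main(final String[] args) {
            // Register every @ConfigData record under com.swirlds, as the CLI command in this diff does.
            final ConfigurationBuilder builder = ConfigurationBuilder.create();
            ConfigUtils.scanAndRegisterAllConfigTypes(builder, Set.of("com.swirlds"));
            final Configuration configuration = builder.build();

            // With no overrides, event.creation.useTipsetAlgorithm falls back to its default of false,
            // so the legacy creators remain responsible for producing events.
            final boolean tipsetEnabled =
                    configuration.getConfigData(EventCreationConfig.class).useTipsetAlgorithm();

            System.out.println("tipset event creation enabled: " + tipsetEnabled);
        }
    }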
* - * @param otherId - * the ID of the node supplying the other parent + * @param otherId the ID of the node supplying the other parent */ protected boolean hasOtherParentAlreadyBeenUsed(final NodeId otherId) { return !Objects.equals(selfId, otherId) && eventMapper.hasMostRecentEventBeenUsedAsOtherParent(otherId); @@ -221,8 +227,7 @@ protected boolean hasSignatureTransactionsWhileFrozen() { /** * Write to the log (if configured) every time an event is created. * - * @param event - * the created event to be logged + * @param event the created event to be logged */ protected void logEventCreation(final EventImpl event) { logger.debug(CREATE_EVENT.getMarker(), "Creating {}", event::toMediumString); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/ConfigMappings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/ConfigMappings.java index dbcdb9dd334f..e595ef58a69f 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/ConfigMappings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/ConfigMappings.java @@ -81,10 +81,10 @@ private ConfigMappings() {} new ConfigMapping("sync.sleepCallerSkips", "sleepCallerSkips"), new ConfigMapping("jvmPauseDetectorSleepMs", "jVMPauseDetectorSleepMs"), new ConfigMapping("jvmPauseReportMs", "jVMPauseReportMs"), - new ConfigMapping("threadPrioritySync", "thread.threadPrioritySync"), - new ConfigMapping("threadPriorityNonSync", "thread.threadPriorityNonSync"), - new ConfigMapping("threadDumpPeriodMs", "thread.threadDumpPeriodMs"), - new ConfigMapping("threadDumpLogDir", "thread.threadDumpLogDir")); + new ConfigMapping("thread.threadPrioritySync", "threadPrioritySync"), + new ConfigMapping("thread.threadPriorityNonSync", "threadPriorityNonSync"), + new ConfigMapping("thread.threadDumpPeriodMs", "threadDumpPeriodMs"), + new ConfigMapping("thread.threadDumpLogDir", "threadDumpLogDir")); /** * Add all known aliases to the provided config source diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java index cfa4a8561eed..06cbcf481aab 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java @@ -20,13 +20,20 @@ import static com.swirlds.logging.LogMarker.STARTUP; import com.swirlds.common.config.reflection.ConfigReflectionUtils; +import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.config.sources.ConfigMapping; +import com.swirlds.common.utility.PlatformVersion; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.BufferedWriter; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Arrays; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -36,6 +43,7 @@ */ public class PlatformConfigUtils { private static final Logger logger = LogManager.getLogger(PlatformConfigUtils.class); + public static final String SETTING_USED_FILENAME = 
"settingsUsed.txt"; private PlatformConfigUtils() { // Utility class @@ -102,4 +110,63 @@ private static Set getConfigNames(@NonNull final Configuration configura }) .collect(Collectors.toSet()); } + + /** + * Write all the settings to the file settingsUsed.txt, some of which might have been changed by settings.txt. + * + * @param directory the directory to write to + */ + public static void writeSettingsUsed(@NonNull final Path directory) { + Objects.requireNonNull(directory, "directory should not be null"); + final Configuration configuration = ConfigurationHolder.getInstance().get(); + writeSettingsUsed(directory, configuration); + } + + /** + * Write all the settings to the file settingsUsed.txt, some of which might have been changed by settings.txt. + * + * @param directory the directory to write to + */ + public static void writeSettingsUsed(@NonNull final Path directory, @NonNull final Configuration configuration) { + Objects.requireNonNull(directory, "directory should not be null"); + Objects.requireNonNull(configuration, "configuration should not be null"); + + try (final BufferedWriter writer = Files.newBufferedWriter(directory.resolve(SETTING_USED_FILENAME))) { + final StringBuilder stringBuilder = new StringBuilder(); + generateSettingsUsed(stringBuilder, configuration); + writer.write(stringBuilder.toString()); + + writer.flush(); + } catch (final IOException e) { + logger.error(EXCEPTION.getMarker(), "Error in writing to settingsUsed.txt", e); + } + } + + /** + * Generate the settings used, some of which might have been changed by settings.txt. + * + * @param stringBuilder the string builder to write to + * @param configuration the configuration to use + */ + public static void generateSettingsUsed( + @NonNull final StringBuilder stringBuilder, @NonNull final Configuration configuration) { + Objects.requireNonNull(stringBuilder, "stringBuilder should not be null"); + Objects.requireNonNull(configuration, "configuration should not be null"); + + stringBuilder.append(PlatformVersion.locateOrDefault().license()); + stringBuilder.append(System.lineSeparator()); + stringBuilder.append(System.lineSeparator()); + + stringBuilder.append( + "The following are all the settings, as modified by settings.txt, but not reflecting any changes " + + "made by config.txt."); + stringBuilder.append(System.lineSeparator()); + stringBuilder.append(System.lineSeparator()); + + final Set propertyNames = + configuration.getPropertyNames().collect(Collectors.toCollection(TreeSet::new)); + for (final String propertyName : propertyNames) { + stringBuilder.append(String.format("%15s = %s%n", propertyName, configuration.getValue(propertyName))); + } + } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/legacy/LegacyConfigPropertiesLoader.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/legacy/LegacyConfigPropertiesLoader.java index 3238ef2b24c0..0ceb91e01bc9 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/legacy/LegacyConfigPropertiesLoader.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/legacy/LegacyConfigPropertiesLoader.java @@ -23,7 +23,7 @@ import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.address.AddressBookUtils; import com.swirlds.common.utility.CommonUtils; -import com.swirlds.platform.Settings; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.FileNotFoundException; import 
java.io.IOException; import java.io.UncheckedIOException; @@ -33,6 +33,7 @@ import java.text.ParseException; import java.util.Arrays; import java.util.Locale; +import java.util.Objects; import java.util.Scanner; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -82,7 +83,7 @@ public static LegacyConfigProperties loadConfigFile(Path configPath) throws Conf while (scanner.hasNextLine()) { final String line = readNextLine(scanner); if (!line.isEmpty()) { - final String[] lineParameters = Settings.splitLine(line); + final String[] lineParameters = splitLine(line); final int len = Math.max(10, lineParameters.length); // pars is the comma-separated parameters, trimmed, lower-cased, then padded with "" to have // at least 10 parameters @@ -166,4 +167,22 @@ private static void setGenesisFreezeTime( private static void onError(String message) { CommonUtils.tellUserConsolePopup("Error", message); } + + /** + * Split the given string on its commas, and trim each result + * + * @param line the string of comma-separated values to split + * @return the array of trimmed elements. + */ + @NonNull + private static String[] splitLine(@NonNull final String line) { + Objects.requireNonNull(line); + + final String[] elms = line.split(","); + for (int i = 0; i < elms.length; i++) { + elms[i] = elms[i].trim(); + } + + return elms; + } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java index 4960e7183563..80c11c8700b1 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java @@ -21,6 +21,7 @@ import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STARTUP; +import com.swirlds.common.config.BasicConfig; import com.swirlds.common.config.PathsConfig; import com.swirlds.common.crypto.CryptographyException; import com.swirlds.common.crypto.config.CryptoConfig; @@ -30,7 +31,6 @@ import com.swirlds.common.utility.CommonUtils; import com.swirlds.config.api.Configuration; import com.swirlds.platform.Crypto; -import com.swirlds.platform.Settings; import com.swirlds.platform.Utilities; import com.swirlds.platform.config.ThreadConfig; import com.swirlds.platform.system.SystemExitCode; @@ -80,6 +80,7 @@ public static Map initNodeSecurity( final ThreadConfig threadConfig = configuration.getConfigData(ThreadConfig.class); final PathsConfig pathsConfig = configuration.getConfigData(PathsConfig.class); final CryptoConfig cryptoConfig = configuration.getConfigData(CryptoConfig.class); + final BasicConfig basicConfig = configuration.getConfigData(BasicConfig.class); final ExecutorService cryptoThreadPool = Executors.newFixedThreadPool( threadConfig.numCryptoThreads(), @@ -91,7 +92,7 @@ public static Map initNodeSecurity( final Map keysAndCerts; try { - if (Settings.getInstance().isLoadKeysFromPfxFiles()) { + if (basicConfig.loadKeysFromPfxFiles()) { try (final Stream list = Files.list(pathsConfig.getKeysDirPath())) { CommonUtils.tellUserConsole("Reading crypto keys from the files here: " + list.filter(path -> path.getFileName().endsWith("pfx")) @@ -129,9 +130,7 @@ public static Map initNodeSecurity( throw new CryptographyException(e); // will never reach this line due to exit above } - final String msg = Settings.getInstance().isLoadKeysFromPfxFiles() - ? 
"Certificate loaded: {}" - : "Certificate generated: {}"; + final String msg = basicConfig.loadKeysFromPfxFiles() ? "Certificate loaded: {}" : "Certificate generated: {}"; final Map cryptoMap = new HashMap<>(); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/ChatterEventDescriptor.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/EventDescriptor.java similarity index 63% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/ChatterEventDescriptor.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/EventDescriptor.java index d27ad1f048dd..5bf8041f74d8 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/ChatterEventDescriptor.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/EventDescriptor.java @@ -14,24 +14,23 @@ * limitations under the License. */ -package com.swirlds.platform.gossip.chatter.protocol.messages; +package com.swirlds.platform.event; -import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; +import static com.swirlds.common.utility.CommonUtils.hex; import com.swirlds.common.crypto.Hash; +import com.swirlds.common.io.SelfSerializable; import com.swirlds.common.io.streams.SerializableDataInputStream; import com.swirlds.common.io.streams.SerializableDataOutputStream; import com.swirlds.common.system.NodeId; -import com.swirlds.common.utility.CommonUtils; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.util.Objects; -import org.apache.commons.lang3.builder.ToStringBuilder; /** - * A stripped down description of a chatter event. + * A stripped down description an event. Stores hash, generation, and creator ID. */ -public class ChatterEventDescriptor implements EventDescriptor { +public class EventDescriptor implements SelfSerializable { public static final long CLASS_ID = 0x825e17f25c6e2566L; @@ -48,96 +47,103 @@ private static final class ClassVersion { private NodeId creator; private long generation; - private int hashCode; - - public ChatterEventDescriptor() {} + /** + * Zero arg constructor, required for deserialization. Do not use manually. + */ + public EventDescriptor() {} /** - * Create a new gossip event descriptor. + * Create a new event descriptor. * * @param hash the hash of the event * @param creator the creator of the event * @param generation the age of an event, smaller is older */ - public ChatterEventDescriptor(@NonNull final Hash hash, @NonNull final NodeId creator, final long generation) { + public EventDescriptor(@NonNull final Hash hash, @NonNull final NodeId creator, final long generation) { this.hash = Objects.requireNonNull(hash, "hash must not be null"); this.creator = Objects.requireNonNull(creator, "creator must not be null"); this.generation = generation; - - hashCode = Objects.hash(hash, creator, generation); - } - - /** - * {@inheritDoc} - */ - @Override - public long getClassId() { - return CLASS_ID; } /** - * {@inheritDoc} + * Get the hash of the event. 
+ * + * @return the event's hash */ - @Override - public void serialize(final SerializableDataOutputStream out) throws IOException { - out.writeSerializable(hash, false); - out.writeSerializable(creator, false); - out.writeLong(generation); + @NonNull + public Hash getHash() { + if (hash == null) { + throw new IllegalStateException("EventDescriptor improperly initialized: the hash is null"); + } + return hash; } /** - * {@inheritDoc} + * Get the node ID of the event's creator. + * + * @return a node ID */ - @Override - public void deserialize(final SerializableDataInputStream in, final int version) throws IOException { - hash = in.readSerializable(false, Hash::new); - if (version < ClassVersion.SELF_SERIALIZABLE_NODE_ID) { - creator = new NodeId(in.readLong()); - } else { - creator = in.readSerializable(false, NodeId::new); + @NonNull + public NodeId getCreator() { + if (creator == null) { + throw new IllegalStateException("EventDescriptor improperly initialized: the creator is null"); } - generation = in.readLong(); - - hashCode = Objects.hash(hash, creator, generation); + return creator; } /** - * {@inheritDoc} + * Get the generation of the event. + * + * @return the generation of the event */ - @Override - public int getVersion() { - return ClassVersion.SELF_SERIALIZABLE_NODE_ID; + public long getGeneration() { + return generation; } /** * {@inheritDoc} */ @Override - public int getMinimumSupportedVersion() { - return ClassVersion.ORIGINAL; + public long getClassId() { + return CLASS_ID; } /** * {@inheritDoc} */ - public Hash getHash() { - return hash; + @Override + public int getVersion() { + return ClassVersion.SELF_SERIALIZABLE_NODE_ID; } /** * {@inheritDoc} */ @Override - @NonNull - public NodeId getCreator() { - return creator; + public void serialize(@NonNull final SerializableDataOutputStream out) throws IOException { + out.writeSerializable(hash, false); + out.writeSerializable(creator, false); + out.writeLong(generation); } /** * {@inheritDoc} */ - public long getGeneration() { - return generation; + @Override + public void deserialize(@NonNull final SerializableDataInputStream in, final int version) throws IOException { + hash = in.readSerializable(false, Hash::new); + if (hash == null) { + throw new IOException("hash cannot be null"); + } + if (version < ClassVersion.SELF_SERIALIZABLE_NODE_ID) { + creator = new NodeId(in.readLong()); + } else { + creator = in.readSerializable(false, NodeId::new); + if (creator == null) { + throw new IOException("creator cannot be null"); + } + } + generation = in.readLong(); } /** @@ -152,11 +158,7 @@ public boolean equals(final Object o) { return false; } - final ChatterEventDescriptor that = (ChatterEventDescriptor) o; - - if (this.hashCode != that.hashCode) { - return false; - } + final EventDescriptor that = (EventDescriptor) o; return Objects.equals(creator, that.creator) && generation == that.generation && hash.equals(that.hash); } @@ -166,15 +168,16 @@ public boolean equals(final Object o) { */ @Override public int hashCode() { - return hashCode; + if (hash == null) { + throw new IllegalStateException("EventDescriptor improperly initialized: the hash is null"); + } + return hash.hashCode(); } @Override public String toString() { - return new ToStringBuilder(this, SHORT_PREFIX_STYLE) - .append("creator", creator) - .append("generation", generation) - .append("hash", CommonUtils.hex(hash.getValue())) - .toString(); + return "(creator: " + creator + ", generation: " + + generation + ", hash: " + + hex(hash.getValue()).substring(0, 12) + ")"; + }
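The rewritten deserialize method above keeps reading the old wire format (the creator as a bare long) for versions below SELF_SERIALIZABLE_NODE_ID and rejects null reads. A generic, self-contained illustration of that version-gated read pattern, deliberately using plain java.io streams rather than the swirlds serialization API; the version constants and the NodeRef record are invented for the example.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public final class VersionedReadSketch {

        static final int ORIGINAL = 1;
        static final int SELF_SERIALIZABLE_NODE_ID = 2;

        /** Invented stand-in for a node identifier. */
        record NodeRef(long id) {}

        static NodeRef readCreator(final DataInputStream in, final int version) throws IOException {
            if (version < SELF_SERIALIZABLE_NODE_ID) {
                // Old format: the creator was written as a bare long.
                return new NodeRef(in.readLong());
            }
            // New format: a presence marker followed by the id; a missing value is an error.
            if (in.readByte() != 1) {
                throw new IOException("creator cannot be null");
            }
            return new NodeRef(in.readLong());
        }

        public static void main(final String[] args) throws IOException {
            // Write in the old format, then read it back using the old version number.
            final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                out.writeLong(42L);
            }
            final NodeRef creator = readCreator(
                    new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())), ORIGINAL);
            System.out.println("creator id = " + creator.id()); // 42
        }
    }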
} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/EventUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/EventUtils.java index 06c13c68cdf4..74670b147278 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/EventUtils.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/EventUtils.java @@ -19,16 +19,17 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.system.events.BaseEvent; import com.swirlds.common.system.events.PlatformEvent; -import com.swirlds.common.system.transaction.Transaction; import com.swirlds.logging.LogMarker; import com.swirlds.platform.EventStrings; import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.ListIterator; +import java.util.Objects; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import org.apache.logging.log4j.LogManager; @@ -40,8 +41,7 @@ public abstract class EventUtils { /** * Converts the event to a short string. Should be replaced by {@link EventStrings#toShortString(EventImpl)} * - * @param event - * the event to convert + * @param event the event to convert * @return a short string */ public static String toShortString(final EventImpl event) { @@ -51,8 +51,7 @@ public static String toShortString(final EventImpl event) { /** * Convert an array of events to a single string, using toShortString() on each, and separating with commas. * - * @param events - * array of events to convert + * @param events array of events to convert * @return a single string with a comma separated list of all of the event strings */ public static String toShortStrings(final EventImpl[] events) { @@ -79,8 +78,7 @@ public static int generationComparator(final PlatformEvent e1, final PlatformEve * Prepares consensus events for shadow graph during a restart or reconnect by sorting the events by generation and * checking for generation gaps. * - * @param events - * events supplied by consensus + * @param events events supplied by consensus * @return a list of input events, sorted and checked */ public static List prepareForShadowGraph(final EventImpl[] events) { @@ -106,10 +104,8 @@ public static List prepareForShadowGraph(final EventImpl[] events) { /** * Checks if there is a generation difference of more than 1 between events, if there is, throws an exception * - * @param events - * events to look for generation gaps in, sorted in ascending order by generation - * @throws IllegalArgumentException - * if any problem is found with the signed state events + * @param events events to look for generation gaps in, sorted in ascending order by generation + * @throws IllegalArgumentException if any problem is found with the signed state events */ public static void checkForGenerationGaps(final List events) { if (events == null || events.isEmpty()) { @@ -154,10 +150,8 @@ public static int consensusPriorityComparator(final EventImpl x, final EventImpl /** * Get the creator ID of the event. If null return {@link EventConstants#CREATOR_ID_UNDEFINED}. 
* - * @param event - * the event - * @return the creator ID as {@code long} of the given event, or the self-ID - * if the given event is {@code null} + * @param event the event + * @return the creator ID as {@code long} of the given event, or the self-ID if the given event is {@code null} */ @Nullable public static NodeId getCreatorId(@Nullable final BaseEvent event) { @@ -171,14 +165,13 @@ public static NodeId getCreatorId(@Nullable final BaseEvent event) { /** * Compute the creation time of a new event. * - * @param now - * a time {@code Instant} - * @param selfParent - * the self-parent of the event to be created + * @param now a time {@code Instant} + * @param selfParent the self-parent of the event to be created * @return a time {@code Instant} which defines the creation time of an event */ - public static Instant getChildTimeCreated(final Instant now, final BaseEvent selfParent) { - Instant timeCreated = now; + public static Instant getChildTimeCreated(@NonNull final Instant now, @Nullable final BaseEvent selfParent) { + + Objects.requireNonNull(now); if (selfParent != null) { // Ensure that events created by self have a monotonically increasing creation time. @@ -197,30 +190,50 @@ public static Instant getChildTimeCreated(final Instant now, final BaseEvent sel // where n is the number of transactions in x (so each can have a different time), // or n=1 if there are no transactions (so each event is a different time). - final Transaction[] transactions = selfParent.getHashedData().getTransactions(); - long minimumTimeIncrement = 1; - if (transactions != null && transactions.length > 0) { - minimumTimeIncrement = transactions.length; - } - - final Instant minimumNextEventTime = - selfParent.getHashedData().getTimeCreated().plusNanos(minimumTimeIncrement); + final int parentTransactionCount = selfParent.getHashedData().getTransactions() == null + ? 0 + : selfParent.getHashedData().getTransactions().length; - if (timeCreated.isBefore(minimumNextEventTime)) { - timeCreated = minimumNextEventTime; - } + return calculateNewEventCreationTime( + now, selfParent.getHashedData().getTimeCreated(), parentTransactionCount); } - return timeCreated; + return now; + } + + /** + * Calculate the creation time for a new event. + *

+ * Regardless of whatever the host computer's clock says, the event creation time must always advance from self + * parent to child. Further, the time in between the self parent and the child must be large enough so that every + * transaction in the parent can be assigned a unique timestamp at nanosecond precision. + * + * @param now the current time + * @param selfParentCreationTime the creation time of the self parent + * @param selfParentTransactionCount the number of transactions in the self parent + * @return the creation time for the new event + */ + @NonNull + public static Instant calculateNewEventCreationTime( + @NonNull final Instant now, + @NonNull final Instant selfParentCreationTime, + final int selfParentTransactionCount) { + + final int minimumIncrement = Math.max(1, selfParentTransactionCount); + final Instant minimumNextEventTime = selfParentCreationTime.plusNanos(minimumIncrement); + if (now.isBefore(minimumNextEventTime)) { + return minimumNextEventTime; + } else { + return now; + } } /** * Get the generation of an event. Returns {@value EventConstants#GENERATION_UNDEFINED} for null events. * - * @param event - * an event - * @return the generation number of the given event, - * or {@value EventConstants#GENERATION_UNDEFINED} is the event is {@code null} + * @param event an event + * @return the generation number of the given event, or {@value EventConstants#GENERATION_UNDEFINED} is the event is + * {@code null} */ public static long getEventGeneration(final BaseEvent event) { if (event == null) { @@ -232,10 +245,9 @@ public static long getEventGeneration(final BaseEvent event) { /** * Get the base hash of an event. Returns null for null events. * - * @param event - * an event - * @return a {@code byte[]} which contains the hash bytes of the given event, or {@code null} - * if the given event is {@code null} + * @param event an event + * @return a {@code byte[]} which contains the hash bytes of the given event, or {@code null} if the given event is + * {@code null} */ public static byte[] getEventHash(final BaseEvent event) { if (event == null) { diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/GossipEvent.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/GossipEvent.java index 5c3077b01f63..a5df07c309ab 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/GossipEvent.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/GossipEvent.java @@ -23,8 +23,6 @@ import com.swirlds.common.system.events.BaseEventUnhashedData; import com.swirlds.platform.EventStrings; import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEvent; -import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEventDescriptor; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import java.io.IOException; import java.time.Instant; import java.util.Objects; @@ -37,7 +35,7 @@ public class GossipEvent implements EventIntakeTask, BaseEvent, ChatterEvent { private static final long ROUND_CREATED_UNDEFINED = -1; private BaseEventHashedData hashedData; private BaseEventUnhashedData unhashedData; - private ChatterEventDescriptor descriptor; + private EventDescriptor descriptor; private Instant timeReceived; private long roundCreated = ROUND_CREATED_UNDEFINED; @@ -96,6 +94,9 @@ public BaseEventUnhashedData getUnhashedData() { */ @Override public EventDescriptor getDescriptor() { + if (descriptor == null) { + throw new 
IllegalStateException("Can not get descriptor until event has been hashed"); + } return descriptor; } @@ -105,7 +106,7 @@ public EventDescriptor getDescriptor() { */ public void buildDescriptor() { this.descriptor = - new ChatterEventDescriptor(hashedData.getHash(), hashedData.getCreatorId(), hashedData.getGeneration()); + new EventDescriptor(hashedData.getHash(), hashedData.getCreatorId(), hashedData.getGeneration()); } /** @@ -116,6 +117,14 @@ public Instant getTimeReceived() { return timeReceived; } + /** + * {@inheritDoc} + */ + @Override + public long getGeneration() { + return hashedData.getGeneration(); + } + /** * @return true if roundCreated has been set */ diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/InternalEventData.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/InternalEventData.java index 7fae348aa574..d869c56e651e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/InternalEventData.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/InternalEventData.java @@ -312,8 +312,8 @@ public void setHasUserTransactions(boolean hasUserTransactions) { /** * @param m - * the member ID - * @return last ancestor created by m (memoizes lastSee function from Swirlds-TR-2020-01) + * the index of the member ID + * @return last ancestor created by the member at index m (memoizes lastSee function from Swirlds-TR-2020-01) */ public EventImpl getLastSee(int m) { return lastSee[m]; @@ -323,9 +323,9 @@ public EventImpl getLastSee(int m) { * remember event, the last ancestor created by m (memoizes lastSee function from Swirlds-TR-2020-01) * * @param m - * the member ID + * the index of the member ID * @param event - * the last seen {@link EventImpl} object created by m + * the last seen {@link EventImpl} object created by the member at index m */ public void setLastSee(int m, EventImpl event) { lastSee[m] = event; @@ -351,8 +351,8 @@ public int sizeLastSee() { /** * @param m - * the member ID - * @return strongly-seen witness in parent round by m (memoizes stronglySeeP function from Swirlds-TR-2020-01) + * the index of the member ID + * @return strongly-seen witness in parent round by the member at index m (memoizes stronglySeeP function from Swirlds-TR-2020-01) */ public EventImpl getStronglySeeP(int m) { return stronglySeeP[m]; @@ -363,9 +363,9 @@ public EventImpl getStronglySeeP(int m) { * Swirlds-TR-2020-01) * * @param m - * the member ID + * the index of the member ID * @param event - * the strongly-seen witness in parent round created by m + * the strongly-seen witness in parent round created by the member at index m */ public void setStronglySeeP(int m, EventImpl event) { stronglySeeP[m] = event; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java index 8c01fccdabe8..90315d478396 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java @@ -19,6 +19,7 @@ import static com.swirlds.logging.LogMarker.CREATE_EVENT; import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.Cryptography; import com.swirlds.common.stream.Signer; 
import com.swirlds.common.system.EventCreationRuleResponse; @@ -32,6 +33,7 @@ import com.swirlds.platform.components.transaction.TransactionSupplier; import com.swirlds.platform.event.EventUtils; import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.event.tipset.EventCreationConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Objects; import java.util.function.Consumer; @@ -64,7 +66,13 @@ public class ChatterEventCreator { private final Time time; + /** + * If true, event creation is being handled by the tipset algorithm and this class should not create any events. + */ + private final boolean disabled; + public ChatterEventCreator( + @NonNull final PlatformContext platformContext, @NonNull final SoftwareVersion softwareVersion, @NonNull final NodeId selfId, @NonNull final Signer signer, @@ -83,6 +91,10 @@ public ChatterEventCreator( this.eventCreationRules = Objects.requireNonNull(eventCreationRules, "the eventCreationRules is null"); this.hasher = Objects.requireNonNull(hasher, "the hasher is null"); this.time = Objects.requireNonNull(time, "the time is null"); + this.disabled = platformContext + .getConfiguration() + .getConfigData(EventCreationConfig.class) + .useTipsetAlgorithm(); } /** @@ -101,6 +113,11 @@ public void createGenesisEvent() { */ public boolean createEvent(@NonNull final NodeId otherId) { Objects.requireNonNull(otherId, "the otherId must not be null"); + + if (disabled) { + return false; + } + final EventCreationRuleResponse basicRulesResponse = eventCreationRules.shouldCreateEvent(); if (basicRulesResponse == EventCreationRuleResponse.DONT_CREATE) { return false; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/linking/OrphanBufferingLinker.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/linking/OrphanBufferingLinker.java index 2199f09a4367..b77a1343fb4d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/linking/OrphanBufferingLinker.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/linking/OrphanBufferingLinker.java @@ -22,8 +22,8 @@ import com.swirlds.common.sequence.map.StandardSequenceMap; import com.swirlds.logging.LogMarker; import com.swirlds.platform.consensus.GraphGenerations; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.event.GossipEvent; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.state.signed.SignedState; import java.util.ArrayDeque; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/ChildlessEventTracker.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/ChildlessEventTracker.java new file mode 100644 index 000000000000..435ebd3fba0b --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/ChildlessEventTracker.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import com.swirlds.platform.event.EventDescriptor; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; + +/** + * Keeps track of events created that have no children. These events are candidates to be used as parents when creating + * a new event. + * + *

+ * At the surface, this class may appear similar to ChatterEventMapper. But unlike that class, this class specifically + * only tracks events without children, as opposed to tracking the most recent event from each creator. This class + * provides no guarantees that an event from any particular node will always be present. + */ +public class ChildlessEventTracker { + + private final Set<EventDescriptor> childlessEvents = new HashSet<>(); + + /** + * Add a new event. + * + * @param eventDescriptor the event to add + * @param parents the parents of the event being added + */ + public void addEvent(@NonNull final EventDescriptor eventDescriptor, @NonNull final List<EventDescriptor> parents) { + Objects.requireNonNull(eventDescriptor); + childlessEvents.add(eventDescriptor); + + for (final EventDescriptor parent : parents) { + childlessEvents.remove(parent); + } + } + + /** + * Register a self event. Removes parents but does not add the event to the set of childless events. + * + * @param parents the parents of the self event + */ + public void registerSelfEventParents(@NonNull final List<EventDescriptor> parents) { + for (final EventDescriptor parent : parents) { + childlessEvents.remove(parent); + } + } + + /** + * Remove ancient events. + * + * @param minimumGenerationNonAncient the minimum generation of non-ancient events + */ + public void pruneOldEvents(final long minimumGenerationNonAncient) { + childlessEvents.removeIf(event -> event.getGeneration() < minimumGenerationNonAncient); + } + + /** + * Get a list of non-ancient childless events. + * + * @return the childless events, this list is safe to modify + */ + @NonNull + public List<EventDescriptor> getChildlessEvents() { + return new ArrayList<>(childlessEvents); + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/EventCreationConfig.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/EventCreationConfig.java new file mode 100644 index 000000000000..d17d39153a20 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/EventCreationConfig.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import com.swirlds.config.api.ConfigData; +import com.swirlds.config.api.ConfigProperty; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Duration; + +/** + * Configuration for event creation. + * + * @param useTipsetAlgorithm if true, use the tipset event creation algorithm + * @param maxCreationRate the maximum rate (in hz) that a node can create new events. The maximum rate + * for the entire network is equal to this value times the number of nodes. A + * value of 0 means that there is no limit to the number of events that can be + * created (as long as those events are legal to create). + * @param antiBullyingFactor the lower this number, the more likely it is that a new event will be created + * that reduces this node's bully score.
Setting this too low may result in a + * suboptimal hashgraph topology. Setting this number too high may lead to some + * nodes being bullied and unable to cause their events to reach consensus. + * @param tipsetSnapshotHistorySize the number of tipsets to keep in the snapshot history. These tipsets are used + * to compute bully scores. + * @param eventIntakeThrottle when the size of the event intake queue equals or exceeds this value, do not + * permit the creation of new self events. + * @param creationQueueSize the size of the intake queue for the event creator + * @param creationQueueBufferSize the size of the buffer for the event creator + * @param creationQueueWaitForWorkPeriod the amount of time the event creator spends waiting for work in its intake + * queue + */ +@ConfigData("event.creation") +public record EventCreationConfig( + @ConfigProperty(defaultValue = "false") boolean useTipsetAlgorithm, + @ConfigProperty(defaultValue = "0") double maxCreationRate, + @ConfigProperty(defaultValue = "10") double antiBullyingFactor, + @ConfigProperty(defaultValue = "10") int tipsetSnapshotHistorySize, + @ConfigProperty(defaultValue = "1024") int eventIntakeThrottle, + @ConfigProperty(defaultValue = "1024") int creationQueueSize, + @ConfigProperty(defaultValue = "1024") int creationQueueBufferSize, + @ConfigProperty(defaultValue = "1 ms") @NonNull Duration creationQueueWaitForWorkPeriod) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/Tipset.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/Tipset.java new file mode 100644 index 000000000000..e427dd0c35cc --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/Tipset.java @@ -0,0 +1,188 @@ +/* + * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.Address; +import com.swirlds.common.system.address.AddressBook; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * Represents a slice of the hashgraph, containing one "tip" from each event creator. + */ +public class Tipset { + + private final AddressBook addressBook; + + /** + * The tip generations, indexed by node index. + */ + private final long[] tips; + + /** + * Create an empty tipset. + * + * @param addressBook the current address book + */ + public Tipset(@NonNull final AddressBook addressBook) { + this.addressBook = Objects.requireNonNull(addressBook); + tips = new long[addressBook.getSize()]; + + // Necessary because we currently start at generation 0, not generation 1. + Arrays.fill(tips, -1); + } + + /** + * Build an empty tipset (i.e. where all generations are -1) using another tipset as a template. 
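Note: the Tipset class above is essentially one "latest known generation" slot per event creator, with -1 meaning that no event from that creator has been seen yet. A minimal self-contained sketch of that bookkeeping (illustrative names, plain Java, not the platform classes):

    import java.util.Arrays;

    // Illustrative only: a stripped-down generation vector, not the platform's Tipset.
    final class SimpleTipset {
        // Highest generation seen from each creator, indexed by address book position; -1 = nothing seen yet.
        final long[] tips;

        SimpleTipset(final int nodeCount) {
            tips = new long[nodeCount];
            Arrays.fill(tips, -1); // generations start at 0, so -1 marks creators with no known events
        }

        // Record that the creator at this index produced an event with the given generation.
        void advance(final int creatorIndex, final long generation) {
            tips[creatorIndex] = Math.max(tips[creatorIndex], generation);
        }
    }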
+ * + * @param tipset the tipset to use as a template + * @return a new empty tipset + */ + private static @NonNull Tipset buildEmptyTipset(@NonNull final Tipset tipset) { + return new Tipset(tipset.addressBook); + } + + /** + *
+ * Merge a list of tipsets together. + * + *
+ * The generation for each node ID will be equal to the maximum generation found for that node ID from all source + * tipsets. + * + * @param tipsets the tipsets to merge, must be non-empty, tipsets must be constructed from the same address book or + * else this method has undefined behavior + * @return a new tipset + */ + public static @NonNull Tipset merge(@NonNull final List tipsets) { + Objects.requireNonNull(tipsets, "tipsets must not be null"); + if (tipsets.isEmpty()) { + throw new IllegalArgumentException("Cannot merge an empty list of tipsets"); + } + + final int length = tipsets.get(0).tips.length; + final Tipset newTipset = buildEmptyTipset(tipsets.get(0)); + + for (int index = 0; index < length; index++) { + long max = -1; + for (final Tipset tipSet : tipsets) { + max = Math.max(max, tipSet.tips[index]); + } + newTipset.tips[index] = max; + } + + return newTipset; + } + + /** + * Get the tip generation for a given node + * + * @param nodeId the node in question + * @return the tip generation for the node + */ + public long getTipGenerationForNode(@NonNull final NodeId nodeId) { + return tips[addressBook.getIndexOfNodeId(nodeId)]; + } + + /** + * Get the number of tips currently being tracked. + * + * @return the number of tips + */ + public int size() { + return tips.length; + } + + /** + * Advance a single tip within the tipset. + * + * @param creator the node ID of the creator of the event + * @param generation the generation of the event + * @return this object + */ + public @NonNull Tipset advance(@NonNull final NodeId creator, final long generation) { + final int index = addressBook.getIndexOfNodeId(creator); + tips[index] = Math.max(tips[index], generation); + return this; + } + + /** + *
+ * Get the combined weight of all nodes which experienced a tip advancement between this tipset and another tipset. + * Note that this method ignores advancement contributions from this node. + *
+ * + *
+ * A tip advancement is defined as an increase in the tip generation for a node ID. The exception to this rule is + * that an increase in generation for the self ID is never counted as a tip advancement. The tip advancement + * weight is defined as the sum of all remaining tip advancements after being appropriately weighted. + *
+ * + *
+ * Advancements of non-zero stake nodes are tracked via {@link TipsetAdvancementWeight#advancementWeight()}, while + * advancements of zero stake nodes are tracked via {@link TipsetAdvancementWeight#zeroWeightAdvancementCount()}. + * + * @param selfId compute the advancement weight relative to this node ID + * @param that the tipset to compare to + * @return the tipset advancement weight + */ + @NonNull + public TipsetAdvancementWeight getTipAdvancementWeight(@NonNull final NodeId selfId, @NonNull final Tipset that) { + long nonZeroWeight = 0; + long zeroWeightCount = 0; + + final int selfIndex = addressBook.getIndexOfNodeId(selfId); + for (int index = 0; index < tips.length; index++) { + if (index == selfIndex) { + // We don't consider self advancement here, since self advancement does nothing to help consensus. + continue; + } + + if (this.tips[index] < that.tips[index]) { + final NodeId nodeId = addressBook.getNodeId(index); + final Address address = addressBook.getAddress(nodeId); + + if (address.getWeight() == 0) { + zeroWeightCount += 1; + } else { + nonZeroWeight += address.getWeight(); + } + } + } + + return TipsetAdvancementWeight.of(nonZeroWeight, zeroWeightCount); + } + + /** + * {@inheritDoc} + */ + @Override + public String toString() { + final StringBuilder sb = new StringBuilder("("); + for (int index = 0; index < tips.length; index++) { + sb.append(addressBook.getNodeId(index)).append(":").append(tips[index]); + if (index < tips.length - 1) { + sb.append(", "); + } + } + sb.append(")"); + return sb.toString(); + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetAdvancementWeight.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetAdvancementWeight.java new file mode 100644 index 000000000000..6dff1e920632 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetAdvancementWeight.java @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * Stores a weighed tipset advancement score. + *
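Note: getTipAdvancementWeight() above, together with the TipsetAdvancementWeight record introduced next, boils down to comparing two generation vectors, summing the weight of every other node whose tip moved forward, and counting advancing zero-weight nodes separately. A hedged, self-contained sketch of that comparison using plain arrays in place of the address book (names are illustrative):

    // Illustrative only: weighted advancement between an older and a newer generation vector.
    final class AdvancementExample {
        record Advancement(long weightedAdvancement, long zeroWeightAdvancements) {}

        static Advancement advancementBetween(
                final long[] before, final long[] after, final long[] nodeWeights, final int selfIndex) {
            long weighted = 0;
            long zeroWeightCount = 0;
            for (int i = 0; i < before.length; i++) {
                if (i == selfIndex) {
                    continue; // self advancement does nothing to help other nodes reach consensus
                }
                if (before[i] < after[i]) {
                    if (nodeWeights[i] == 0) {
                        zeroWeightCount++; // tracked separately so zero-weight nodes are not starved
                    } else {
                        weighted += nodeWeights[i];
                    }
                }
            }
            return new Advancement(weighted, zeroWeightCount);
        }
    }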
+ * If zero weight nodes were not a thing, we could use a long as a tipset advancement score. Or, if it was ok to ignore + * zero weight nodes, we could do the same as well. But since we don't want to allow zero sake nodes to get stale + * events, we need to have a mechanism for separately tracking when zero weight nodes have advanced. + * + * @param advancementWeight the advancement weight provided by nodes with non-zero weight. Contributes to + * meeting the threshold required to advance the current snapshot. + * @param zeroWeightAdvancementCount the advancement provided by nodes with zero weight, this is incremented by one for + * each zero weight node that advances. Does not contribute to meeting the threshold + * required to advance the current snapshot. + */ +public record TipsetAdvancementWeight(long advancementWeight, long zeroWeightAdvancementCount) { + + /** + * Zero advancement weight. For convenience. + */ + public static final TipsetAdvancementWeight ZERO_ADVANCEMENT_WEIGHT = TipsetAdvancementWeight.of(0, 0); + + /** + * Create a new instance of a tipset advancement score + * + * @param advancementWeight the advancement weight provided by nodes with non-zero weight + * @param zeroWeightAdvancementCount the number of advancing zero weight nodes + * @return a new instance of a tipset advancement score + */ + public static TipsetAdvancementWeight of(final long advancementWeight, final long zeroWeightAdvancementCount) { + return new TipsetAdvancementWeight(advancementWeight, zeroWeightAdvancementCount); + } + + /** + * Subtract two weights and return the result. + * + * @param that the weight to subtract from this weight + * @return the result of subtracting the given weight from this weight + */ + @NonNull + public TipsetAdvancementWeight minus(@NonNull final TipsetAdvancementWeight that) { + return new TipsetAdvancementWeight( + advancementWeight - that.advancementWeight, + zeroWeightAdvancementCount - that.zeroWeightAdvancementCount); + } + + /** + * Add two weights and return the result. + * + * @param that the weight to add to this weight + * @return the result of adding the given weight to this weight + */ + @NonNull + public TipsetAdvancementWeight plus(@NonNull final TipsetAdvancementWeight that) { + return new TipsetAdvancementWeight( + advancementWeight + that.advancementWeight, + zeroWeightAdvancementCount + that.zeroWeightAdvancementCount); + } + + /** + * Check if this weight is greater than the given weight. First {@link #advancementWeight()} is compared. If that is + * equal, then {@link #zeroWeightAdvancementCount()} breaks the tie. + * + * @param that the weight to compare to + * @return true if this weight is greater than the given weight + */ + public boolean isGreaterThan(@NonNull final TipsetAdvancementWeight that) { + return advancementWeight > that.advancementWeight + || (advancementWeight == that.advancementWeight + && zeroWeightAdvancementCount > that.zeroWeightAdvancementCount); + } + + /** + * Check if this weight is zero. 
+ */ + public boolean isNonZero() { + return advancementWeight != 0 || zeroWeightAdvancementCount != 0; + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreationManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreationManager.java new file mode 100644 index 000000000000..33312c44728d --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreationManager.java @@ -0,0 +1,252 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import static com.swirlds.base.state.LifecyclePhase.NOT_STARTED; +import static com.swirlds.base.state.LifecyclePhase.STARTED; +import static com.swirlds.base.state.LifecyclePhase.STOPPED; + +import com.swirlds.base.state.Lifecycle; +import com.swirlds.base.state.LifecyclePhase; +import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.stream.Signer; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.SoftwareVersion; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.common.system.status.PlatformStatus; +import com.swirlds.common.threading.framework.BlockingQueueInserter; +import com.swirlds.common.threading.framework.MultiQueueThread; +import com.swirlds.common.threading.framework.config.MultiQueueThreadConfiguration; +import com.swirlds.common.threading.manager.ThreadManager; +import com.swirlds.platform.StartUpEventFrozenManager; +import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.event.tipset.rules.AggregateTipsetEventCreationRules; +import com.swirlds.platform.event.tipset.rules.TipsetBackpressureRule; +import com.swirlds.platform.event.tipset.rules.TipsetEventCreationRule; +import com.swirlds.platform.event.tipset.rules.TipsetMaximumRateRule; +import com.swirlds.platform.event.tipset.rules.TipsetPlatformStatusRule; +import com.swirlds.platform.eventhandling.EventTransactionPool; +import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.Random; +import java.util.function.Consumer; +import java.util.function.IntSupplier; +import java.util.function.Supplier; + +/** + * Manages the creation of events. + */ +public class TipsetEventCreationManager implements Lifecycle { + + /** + * Tracks the lifecycle of this object. + */ + private LifecyclePhase lifecyclePhase = NOT_STARTED; + + /** + * The core logic for creating events. + */ + private final TipsetEventCreator eventCreator; + + /** + * Contains tasks that need to be run on the processing thread for this component. + */ + private final MultiQueueThread workQueue; + + /** + * The object used to enqueue new events onto the work queue. 
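Note: everything this manager does (ingesting events, applying minimum-generation updates, and opportunistically creating events) runs on a single processing thread fed by the work queue configured below. A rough self-contained sketch of that pattern using only java.util.concurrent, not the platform's MultiQueueThread API:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;

    // Illustrative only: one consumer thread handles queued work items and, whenever the queue
    // runs dry (or after a batch of work), gives the event creator a chance to make a new event.
    final class SingleThreadedWorkLoop implements Runnable {
        private final BlockingQueue<Object> queue = new LinkedBlockingQueue<>(1024);
        private volatile boolean running = true;

        void submit(final Object workItem) throws InterruptedException {
            queue.put(workItem); // called from intake / consensus threads
        }

        void stop() {
            running = false;
        }

        @Override
        public void run() {
            while (running) {
                try {
                    // Wait briefly for work; a null result means the queue was idle.
                    final Object workItem = queue.poll(1, TimeUnit.MILLISECONDS);
                    if (workItem instanceof Long minimumGenerationNonAncient) {
                        handleMinimumGenerationNonAncient(minimumGenerationNonAncient);
                    } else if (workItem != null) {
                        handleEvent(workItem);
                    }
                    maybeCreateEvent(); // run after handling work and whenever the queue is idle
                } catch (final InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }

        private void handleEvent(final Object event) { /* pass the event to the creator */ }

        private void handleMinimumGenerationNonAncient(final long minGen) { /* update the creator */ }

        private void maybeCreateEvent() { /* ask the creator for a new event if the rules permit */ }
    }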
+ */ + private final BlockingQueueInserter eventInserter; + + /** + * The object used to enqueue updates to the minimum generation non-ancient onto the work queue. + */ + private final BlockingQueueInserter minimumGenerationNonAncientInserter; + + /** + * When the event creator makes a new event, pass it to this lambda. + */ + private final Consumer newEventHandler; + + /** + * The rules that determine whether or not a new event should be created. + */ + private final TipsetEventCreationRule eventCreationRules; + + /** + * Constructor. + * + * @param platformContext the platform's context + * @param threadManager manages the creation of new threads + * @param time provides the wall clock time + * @param random a source of randomness, does not need to be cryptographically secure + * @param signer can sign with this node's key + * @param addressBook the current address book + * @param selfId the ID of this node + * @param softwareVersion the current software version + * @param transactionPool provides transactions to be added to new events + * @param newEventHandler when the event creator makes a new event, pass it to this lambda + * @param eventIntakeQueueSize provides the current size of the event intake queue + * @param platformStatusSupplier provides the current platform status + * @param startUpEventFrozenManager prevents event creation when the platform has just started up + */ + public TipsetEventCreationManager( + @NonNull final PlatformContext platformContext, + @NonNull final ThreadManager threadManager, + @NonNull final Time time, + @NonNull final Random random, + @NonNull final Signer signer, + @NonNull final AddressBook addressBook, + @NonNull final NodeId selfId, + @NonNull final SoftwareVersion softwareVersion, + @NonNull final EventTransactionPool transactionPool, + @NonNull final Consumer newEventHandler, + @NonNull final IntSupplier eventIntakeQueueSize, + @NonNull final Supplier platformStatusSupplier, + @NonNull final StartUpEventFrozenManager startUpEventFrozenManager) { + + this.newEventHandler = Objects.requireNonNull(newEventHandler); + + Objects.requireNonNull(platformContext); + Objects.requireNonNull(threadManager); + Objects.requireNonNull(time); + Objects.requireNonNull(random); + Objects.requireNonNull(signer); + Objects.requireNonNull(addressBook); + Objects.requireNonNull(selfId); + Objects.requireNonNull(softwareVersion); + Objects.requireNonNull(transactionPool); + Objects.requireNonNull(eventIntakeQueueSize); + Objects.requireNonNull(platformStatusSupplier); + Objects.requireNonNull(startUpEventFrozenManager); + + eventCreationRules = AggregateTipsetEventCreationRules.of( + new TipsetMaximumRateRule(platformContext, time), + new TipsetBackpressureRule(platformContext, eventIntakeQueueSize), + new TipsetPlatformStatusRule(platformStatusSupplier, transactionPool, startUpEventFrozenManager)); + + eventCreator = new TipsetEventCreatorImpl( + platformContext, + time, + random /* does not need to be cryptographically secure */, + signer, + addressBook, + selfId, + softwareVersion, + transactionPool); + + final EventCreationConfig eventCreationConfig = + platformContext.getConfiguration().getConfigData(EventCreationConfig.class); + + workQueue = new MultiQueueThreadConfiguration(threadManager) + .setThreadName("event-creator") + .setCapacity(eventCreationConfig.creationQueueSize()) + .setMaxBufferSize(eventCreationConfig.creationQueueBufferSize()) + .addHandler(EventImpl.class, this::handleEvent) + .addHandler(Long.class, this::handleMinimumGenerationNonAncient) + 
.setIdleCallback(this::maybeCreateEvent) + .setBatchHandledCallback(this::maybeCreateEvent) + .setWaitForWorkDuration(eventCreationConfig.creationQueueWaitForWorkPeriod()) + .build(); + + eventInserter = workQueue.getInserter(EventImpl.class); + minimumGenerationNonAncientInserter = workQueue.getInserter(Long.class); + } + + /** + * Add an event from the event intake to the work queue. A background thread will eventually pass this event to the + * event creator on the processing thread. + * + * @param event the event to add + */ + public void registerEvent(@NonNull final EventImpl event) throws InterruptedException { + eventInserter.put(event); + } + + /** + * Update the minimum generation non-ancient + * + * @param minimumGenerationNonAncient the new minimum generation non-ancient + */ + public void setMinimumGenerationNonAncient(final long minimumGenerationNonAncient) throws InterruptedException { + minimumGenerationNonAncientInserter.put(minimumGenerationNonAncient); + } + + /** + * Take an event from the work queue and pass it into the event creator. + * + * @param event the event to pass + */ + private void handleEvent(@NonNull final EventImpl event) { + eventCreator.registerEvent(event); + } + + /** + * Pass a new minimum generation non-ancient into the event creator. + * + * @param minimumGenerationNonAncient the new minimum generation non-ancient + */ + private void handleMinimumGenerationNonAncient(final long minimumGenerationNonAncient) { + eventCreator.setMinimumGenerationNonAncient(minimumGenerationNonAncient); + } + + /** + * Create a new event if it is legal to do so. + */ + private void maybeCreateEvent() { + if (!eventCreationRules.isEventCreationPermitted()) { + return; + } + + final GossipEvent event = eventCreator.maybeCreateEvent(); + if (event != null) { + eventCreationRules.eventWasCreated(); + newEventHandler.accept(event); + } + } + + /** + * {@inheritDoc} + */ + @NonNull + @Override + public LifecyclePhase getLifecyclePhase() { + return lifecyclePhase; + } + + /** + * {@inheritDoc} + */ + @Override + public void start() { + throwIfNotInPhase(NOT_STARTED); + lifecyclePhase = STARTED; + workQueue.start(); + } + + /** + * {@inheritDoc} + */ + @Override + public void stop() { + throwIfNotInPhase(STARTED); + lifecyclePhase = STOPPED; + workQueue.stop(); + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreationManagerFactory.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreationManagerFactory.java new file mode 100644 index 000000000000..49fac2784d62 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreationManagerFactory.java @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.swirlds.platform.event.tipset; + +import static com.swirlds.common.threading.interrupt.Uninterruptable.abortAndThrowIfInterrupted; + +import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.stream.Signer; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.SoftwareVersion; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.common.system.status.PlatformStatus; +import com.swirlds.common.threading.framework.QueueThread; +import com.swirlds.common.threading.manager.ThreadManager; +import com.swirlds.platform.StartUpEventFrozenManager; +import com.swirlds.platform.event.EventIntakeTask; +import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.eventhandling.EventTransactionPool; +import com.swirlds.platform.observers.ConsensusRoundObserver; +import com.swirlds.platform.observers.EventObserverDispatcher; +import com.swirlds.platform.observers.PreConsensusEventObserver; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; +import java.util.Random; +import java.util.function.Consumer; +import java.util.function.Supplier; + +/** + * A factory for creating {@link TipsetEventCreationManager} instances. + */ +public final class TipsetEventCreationManagerFactory { + + private TipsetEventCreationManagerFactory() {} + + /** + * Create a new tipset event creation manager. + * + * @param platformContext the platform's context + * @param threadManager manages the creation of new threads + * @param time provides the wall clock time + * @param signer can sign with this node's key + * @param addressBook the current address book + * @param selfId the ID of this node + * @param appVersion the current application version + * @param transactionPool provides transactions to be added to new events + * @param eventIntakeQueue the queue to which new events should be added + * @param eventObserverDispatcher wires together event intake logic + * @param platformStatusSupplier provides the current platform status + * @param startUpEventFrozenManager manages the start-up event frozen state + * @return a new tipset event creation manager, or null if tipset event creation is disabled + */ + @Nullable + public static TipsetEventCreationManager buildTipsetEventCreationManager( + @NonNull final PlatformContext platformContext, + @NonNull final ThreadManager threadManager, + @NonNull final Time time, + @NonNull final Signer signer, + @NonNull final AddressBook addressBook, + @NonNull final NodeId selfId, + @NonNull final SoftwareVersion appVersion, + @NonNull final EventTransactionPool transactionPool, + @NonNull final QueueThread eventIntakeQueue, + @NonNull final EventObserverDispatcher eventObserverDispatcher, + @NonNull final Supplier platformStatusSupplier, + @NonNull final StartUpEventFrozenManager startUpEventFrozenManager) { + + Objects.requireNonNull(platformContext); + Objects.requireNonNull(threadManager); + Objects.requireNonNull(time); + Objects.requireNonNull(signer); + Objects.requireNonNull(addressBook); + Objects.requireNonNull(selfId); + Objects.requireNonNull(appVersion); + Objects.requireNonNull(transactionPool); + Objects.requireNonNull(eventIntakeQueue); + Objects.requireNonNull(eventObserverDispatcher); + Objects.requireNonNull(platformStatusSupplier); + Objects.requireNonNull(startUpEventFrozenManager); + + final boolean useTipsetAlgorithm = platformContext + .getConfiguration() + 
.getConfigData(EventCreationConfig.class) + .useTipsetAlgorithm(); + + if (!useTipsetAlgorithm) { + return null; + } + + final Consumer newEventHandler = + event -> abortAndThrowIfInterrupted(eventIntakeQueue::put, event, "intakeQueue.put() interrupted"); + + final TipsetEventCreationManager manager = new TipsetEventCreationManager( + platformContext, + threadManager, + time, + new Random() /* does not need to be cryptographically secure */, + signer, + addressBook, + selfId, + appVersion, + transactionPool, + newEventHandler, + eventIntakeQueue::size, + platformStatusSupplier, + startUpEventFrozenManager); + + eventObserverDispatcher.addObserver((PreConsensusEventObserver) event -> abortAndThrowIfInterrupted( + manager::registerEvent, + event, + "Interrupted while attempting to register event with tipset event creator")); + + eventObserverDispatcher.addObserver((ConsensusRoundObserver) round -> abortAndThrowIfInterrupted( + manager::setMinimumGenerationNonAncient, + round.getGenerations().getMinGenerationNonAncient(), + "Interrupted while attempting to register minimum generation " + + "non-ancient with tipset event creator")); + + return manager; + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreator.java new file mode 100644 index 000000000000..e41fef177600 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreator.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * An object that creates new events using the tipset algorithm. + */ +public interface TipsetEventCreator { + + /** + * Register a new event from event intake. + * + * @param event the event to add + */ + void registerEvent(@NonNull EventImpl event); + + /** + * Update the minimum generation non-ancient. + * + * @param minimumGenerationNonAncient the new minimum generation non-ancient + */ + void setMinimumGenerationNonAncient(long minimumGenerationNonAncient); + + /** + * Create a new event if it is legal to do so. 
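Note: a hedged usage sketch of the interface above, showing how a caller is expected to drive it. In the real code these calls happen on the event creation manager's work queue thread rather than inline, and the class and variable names below are illustrative.

    import com.swirlds.platform.event.GossipEvent;
    import com.swirlds.platform.event.tipset.TipsetEventCreator;
    import com.swirlds.platform.internal.EventImpl;
    import java.util.Queue;

    // Illustrative only: the three interactions a caller has with a TipsetEventCreator.
    final class TipsetEventCreatorUsage {

        // Every event coming out of intake is shown to the creator so it can track tipsets.
        static void onIntakeEvent(final TipsetEventCreator creator, final EventImpl event) {
            creator.registerEvent(event);
        }

        // As rounds reach consensus, the ancient boundary moves forward.
        static void onConsensusAdvanced(final TipsetEventCreator creator, final long minimumGenerationNonAncient) {
            creator.setMinimumGenerationNonAncient(minimumGenerationNonAncient);
        }

        // Periodically ask for a new event; null means creation is not legal right now.
        static void tryToCreate(final TipsetEventCreator creator, final Queue<GossipEvent> outbound) {
            final GossipEvent newEvent = creator.maybeCreateEvent();
            if (newEvent != null) {
                outbound.add(newEvent); // hand off for intake / gossip
            }
        }
    }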
+ * + * @return the new event, or null if it is not legal to create a new event + */ + @Nullable + GossipEvent maybeCreateEvent(); +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreatorImpl.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreatorImpl.java new file mode 100644 index 000000000000..10c093fd6b77 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetEventCreatorImpl.java @@ -0,0 +1,428 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import static com.swirlds.common.system.NodeId.UNDEFINED_NODE_ID; +import static com.swirlds.logging.LogMarker.EXCEPTION; +import static com.swirlds.platform.event.EventConstants.CREATOR_ID_UNDEFINED; +import static com.swirlds.platform.event.EventConstants.GENERATION_UNDEFINED; +import static com.swirlds.platform.event.tipset.TipsetAdvancementWeight.ZERO_ADVANCEMENT_WEIGHT; +import static com.swirlds.platform.event.tipset.TipsetUtils.buildDescriptor; +import static com.swirlds.platform.event.tipset.TipsetUtils.getParentDescriptors; + +import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.crypto.Cryptography; +import com.swirlds.common.crypto.Hash; +import com.swirlds.common.stream.Signer; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.SoftwareVersion; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.common.system.events.BaseEventHashedData; +import com.swirlds.common.system.events.BaseEventUnhashedData; +import com.swirlds.platform.components.transaction.TransactionSupplier; +import com.swirlds.platform.event.EventDescriptor; +import com.swirlds.platform.event.EventUtils; +import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Random; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Responsible for creating new events using the tipset algorithm. 
+ */ +public class TipsetEventCreatorImpl implements TipsetEventCreator { + + private static final Logger logger = LogManager.getLogger(TipsetEventCreatorImpl.class); + + private final Cryptography cryptography; + private final Time time; + private final Random random; + private final Signer signer; + private final NodeId selfId; + private final TipsetTracker tipsetTracker; + private final TipsetWeightCalculator tipsetWeightCalculator; + private final ChildlessEventTracker childlessOtherEventTracker; + private final TransactionSupplier transactionSupplier; + private final SoftwareVersion softwareVersion; + + /** + * The bully score is divided by this number to get the probability of creating an event that reduces the bully + * score. The higher this number is, the lower the probability is that an event will be created that reduces the + * bully score. + */ + private final double antiBullyingFactor; + + private final TipsetMetrics tipsetMetrics; + + /** + * The last event created by this node. + */ + private EventDescriptor lastSelfEvent; + + /** + * The timestamp of the last event created by this node. + */ + private Instant lastSelfEventCreationTime; + + /** + * The number of transactions in the last event created by this node. + */ + private int lastSelfEventTransactionCount; + + /** + * Create a new tipset event creator. + * + * @param platformContext the platform context + * @param time provides wall clock time + * @param random a source of randomness, does not need to be cryptographically secure + * @param signer used for signing things with this node's private key + * @param addressBook the current address book + * @param selfId this node's ID + * @param softwareVersion the current software version of the application + * @param transactionSupplier provides transactions to be included in new events + */ + public TipsetEventCreatorImpl( + @NonNull final PlatformContext platformContext, + @NonNull final Time time, + @NonNull final Random random, + @NonNull final Signer signer, + @NonNull final AddressBook addressBook, + @NonNull final NodeId selfId, + @NonNull final SoftwareVersion softwareVersion, + @NonNull final TransactionSupplier transactionSupplier) { + + this.time = Objects.requireNonNull(time); + this.random = Objects.requireNonNull(random); + this.signer = Objects.requireNonNull(signer); + this.selfId = Objects.requireNonNull(selfId); + this.transactionSupplier = Objects.requireNonNull(transactionSupplier); + this.softwareVersion = Objects.requireNonNull(softwareVersion); + + Objects.requireNonNull(addressBook); + + final EventCreationConfig eventCreationConfig = + platformContext.getConfiguration().getConfigData(EventCreationConfig.class); + + cryptography = platformContext.getCryptography(); + antiBullyingFactor = Math.max(1.0, eventCreationConfig.antiBullyingFactor()); + tipsetMetrics = new TipsetMetrics(platformContext, addressBook); + tipsetTracker = new TipsetTracker(addressBook); + childlessOtherEventTracker = new ChildlessEventTracker(); + tipsetWeightCalculator = new TipsetWeightCalculator( + platformContext, addressBook, selfId, tipsetTracker, childlessOtherEventTracker); + } + + /** + * {@inheritDoc} + */ + @Override + public void registerEvent(@NonNull final EventImpl event) { + + final NodeId eventCreator = event.getHashedData().getCreatorId(); + final boolean selfEvent = eventCreator.equals(selfId); + + if (selfEvent) { + if (lastSelfEvent == null || lastSelfEvent.getGeneration() < event.getGeneration()) { + // Normally we will ingest self events before we get to 
this point, but it's possible + // to learn of self events for the first time here if we are loading from a restart or reconnect. + lastSelfEvent = buildDescriptor(event); + lastSelfEventCreationTime = event.getHashedData().getTimeCreated(); + lastSelfEventTransactionCount = event.getTransactions() == null ? 0 : event.getTransactions().length; + + if (event.getBaseEventUnhashedData().getOtherId() != UNDEFINED_NODE_ID) { + final EventDescriptor parentDescriptor = new EventDescriptor( + event.getBaseEventHashedData().getOtherParentHash(), + event.getBaseEventUnhashedData().getOtherId(), + event.getBaseEventHashedData().getOtherParentGen()); + + childlessOtherEventTracker.registerSelfEventParents(List.of(parentDescriptor)); + } + } else { + // We already ingested this self event (when it was created), + return; + } + } + + final EventDescriptor descriptor = buildDescriptor(event); + final List parentDescriptors = getParentDescriptors(event); + + tipsetTracker.addEvent(descriptor, parentDescriptors); + + if (!selfEvent) { + childlessOtherEventTracker.addEvent(descriptor, parentDescriptors); + } + } + + /** + * {@inheritDoc} + */ + @Override + public void setMinimumGenerationNonAncient(final long minimumGenerationNonAncient) { + tipsetTracker.setMinimumGenerationNonAncient(minimumGenerationNonAncient); + childlessOtherEventTracker.pruneOldEvents(minimumGenerationNonAncient); + } + + /** + * {@inheritDoc} + */ + @Override + @Nullable + public GossipEvent maybeCreateEvent() { + final long bullyScore = tipsetWeightCalculator.getMaxBullyScore(); + tipsetMetrics.getBullyScoreMetric().update(bullyScore); + + // Never bother with anti-bullying techniques if we have a bully score of 1. We are pretty much guaranteed + // to bully ~1/3 of other nodes by a score of 1. + final double beNiceToNerdChance = (bullyScore - 1) / antiBullyingFactor; + + if (beNiceToNerdChance > 0 && random.nextDouble() < beNiceToNerdChance) { + return createEventToReduceBullyScore(); + } else { + return createEventByOptimizingAdvancementWeight(); + } + } + + /** + * Create an event using the other parent with the best tipset advancement weight. + * + * @return the new event, or null if it is not legal to create a new event + */ + @Nullable + private GossipEvent createEventByOptimizingAdvancementWeight() { + final List possibleOtherParents = childlessOtherEventTracker.getChildlessEvents(); + Collections.shuffle(possibleOtherParents, random); + + EventDescriptor bestOtherParent = null; + TipsetAdvancementWeight bestAdvancementWeight = ZERO_ADVANCEMENT_WEIGHT; + for (final EventDescriptor otherParent : possibleOtherParents) { + final TipsetAdvancementWeight advancementWeight = + tipsetWeightCalculator.getTheoreticalAdvancementWeight(List.of(otherParent)); + if (advancementWeight.isGreaterThan(bestAdvancementWeight)) { + bestOtherParent = otherParent; + bestAdvancementWeight = advancementWeight; + } + } + + if (lastSelfEvent != null && bestOtherParent == null) { + // There exist no parents that can advance consensus, and this is not our first event. + // The only time it's ok to create an event with no other parent is when we are creating + // our first event. + return null; + } + + if (bestOtherParent != null) { + tipsetMetrics.getTipsetParentMetric(bestOtherParent.getCreator()).cycle(); + } + return buildAndProcessEvent(bestOtherParent); + } + + /** + * Create an event that reduces the bully score. 
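Note: the pity-path probability computed in maybeCreateEvent() above is linear in the bully score and scaled by antiBullyingFactor, so a score of 1 never triggers it and a score of antiBullyingFactor + 1 or more always does. A small standalone illustration with the default factor of 10:

    import java.util.Random;

    // Illustrative only: the "be nice to nerds" decision used in maybeCreateEvent() above.
    final class PityPathExample {
        public static void main(final String[] args) {
            final double antiBullyingFactor = 10.0; // default from EventCreationConfig
            final Random random = new Random();
            for (final long bullyScore : new long[] {1, 3, 6, 11}) {
                final double beNiceToNerdChance = (bullyScore - 1) / antiBullyingFactor;
                final boolean takePityPath = beNiceToNerdChance > 0 && random.nextDouble() < beNiceToNerdChance;
                // bullyScore=1 -> 0.0 (never), 3 -> 0.2, 6 -> 0.5, 11 -> 1.0 (always)
                System.out.printf("bullyScore=%d chance=%.1f takePityPath=%b%n", bullyScore, beNiceToNerdChance, takePityPath);
            }
        }
    }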
+ * + * @return the new event, or null if it is not legal to create a new event + */ + @Nullable + private GossipEvent createEventToReduceBullyScore() { + final List possibleOtherParents = childlessOtherEventTracker.getChildlessEvents(); + final List nerds = new ArrayList<>(possibleOtherParents.size()); + + // Choose a random nerd, weighted by how much it is currently being bullied. + + // First, figure out who is a nerd and sum up all bully scores. + int bullyScoreSum = 0; + final List bullyScores = new ArrayList<>(possibleOtherParents.size()); + for (final EventDescriptor possibleNerd : possibleOtherParents) { + final int bullyScore = tipsetWeightCalculator.getBullyScoreForNode(possibleNerd.getCreator()); + + final List theoreticalParents = new ArrayList<>(2); + theoreticalParents.add(possibleNerd); + if (lastSelfEvent == null) { + throw new IllegalStateException("lastSelfEvent is null"); + } + theoreticalParents.add(lastSelfEvent); + + final TipsetAdvancementWeight advancementWeight = + tipsetWeightCalculator.getTheoreticalAdvancementWeight(theoreticalParents); + + if (bullyScore > 1) { + if (advancementWeight.isNonZero()) { + nerds.add(possibleNerd); + bullyScores.add(bullyScore); + bullyScoreSum += bullyScore; + } else { + // Note: if bully score is greater than 1, it is mathematically not possible + // for the advancement score to be zero. But in the interest in extreme caution, + // we check anyway, since it is very important never to create events with + // an advancement score of zero. + logger.error( + EXCEPTION.getMarker(), + "bully score is {} but advancement score is zero for {}", + bullyScore, + possibleNerd); + } + } + } + + if (nerds.isEmpty()) { + // Note: this should be impossible, since we will not enter this method in the first + // place if there are no nerds. But better to be safe than sorry, and returning null + // is an acceptable way of saying "I can't create an event right now". + logger.error(EXCEPTION.getMarker(), "failed to locate eligible nerd to use as a parent"); + return null; + } + + // Choose a random nerd. + final int choice = random.nextInt(bullyScoreSum); + int runningSum = 0; + for (int i = 0; i < nerds.size(); i++) { + runningSum += bullyScores.get(i); + if (choice < runningSum) { + final EventDescriptor nerd = nerds.get(i); + tipsetMetrics.getPityParentMetric(nerd.getCreator()).cycle(); + return buildAndProcessEvent(nerd); + } + } + + // This should be impossible. + throw new IllegalStateException("Failed to find an other parent"); + } + + /** + * Given an other parent, build the next self event and process it. 
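Note: the nerd is drawn with probability proportional to its bully score using a cumulative running sum over the candidate list. A self-contained version of that selection, assuming every score is positive (illustrative names):

    import java.util.List;
    import java.util.Random;

    // Illustrative only: choose an index with probability proportional to its score,
    // mirroring the cumulative-sum selection in createEventToReduceBullyScore() above.
    final class WeightedChoiceExample {
        static int chooseIndex(final List<Integer> scores, final Random random) {
            int scoreSum = 0;
            for (final int score : scores) {
                scoreSum += score;
            }
            final int choice = random.nextInt(scoreSum); // uniform in [0, scoreSum)
            int runningSum = 0;
            for (int i = 0; i < scores.size(); i++) {
                runningSum += scores.get(i);
                if (choice < runningSum) {
                    return i; // probability of landing here is scores[i] / scoreSum
                }
            }
            throw new IllegalStateException("unreachable when all scores are positive");
        }
    }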
+ * + * @param otherParent the other parent, or null if there is no other parent + * @return the new event + */ + private GossipEvent buildAndProcessEvent(@Nullable final EventDescriptor otherParent) { + final List parentDescriptors = new ArrayList<>(2); + if (lastSelfEvent != null) { + parentDescriptors.add(lastSelfEvent); + } + if (otherParent != null) { + parentDescriptors.add(otherParent); + } + + final GossipEvent event = assembleEventObject(otherParent); + + final EventDescriptor descriptor = buildDescriptor(event); + tipsetTracker.addEvent(descriptor, parentDescriptors); + final TipsetAdvancementWeight advancementWeight = + tipsetWeightCalculator.addEventAndGetAdvancementWeight(descriptor); + final double weightRatio = advancementWeight.advancementWeight() + / (double) tipsetWeightCalculator.getMaximumPossibleAdvancementWeight(); + tipsetMetrics.getTipsetAdvancementMetric().update(weightRatio); + + if (otherParent != null) { + childlessOtherEventTracker.registerSelfEventParents(List.of(otherParent)); + } + + lastSelfEvent = descriptor; + lastSelfEventCreationTime = event.getHashedData().getTimeCreated(); + lastSelfEventTransactionCount = event.getHashedData().getTransactions().length; + + return event; + } + + /** + * Given the parents, assemble the event object. + * + * @param otherParent the other parent + * @return the event + */ + @NonNull + private GossipEvent assembleEventObject(@Nullable final EventDescriptor otherParent) { + + final long selfParentGeneration = getGeneration(lastSelfEvent); + final Hash selfParentHash = getHash(lastSelfEvent); + + final long otherParentGeneration = getGeneration(otherParent); + final Hash otherParentHash = getHash(otherParent); + final NodeId otherParentId = getCreator(otherParent); + + final Instant now = time.now(); + final Instant timeCreated; + if (lastSelfEvent == null) { + timeCreated = now; + } else { + timeCreated = EventUtils.calculateNewEventCreationTime( + now, lastSelfEventCreationTime, lastSelfEventTransactionCount); + } + + final BaseEventHashedData hashedData = new BaseEventHashedData( + softwareVersion, + selfId, + selfParentGeneration, + otherParentGeneration, + selfParentHash, + otherParentHash, + timeCreated, + transactionSupplier.getTransactions()); + cryptography.digestSync(hashedData); + + final BaseEventUnhashedData unhashedData = new BaseEventUnhashedData( + otherParentId, signer.sign(hashedData.getHash().getValue()).getSignatureBytes()); + + final GossipEvent event = new GossipEvent(hashedData, unhashedData); + cryptography.digestSync(event); + event.buildDescriptor(); + return event; + } + + /** + * Get the generation of a descriptor, handle null appropriately. + */ + private static long getGeneration(@Nullable final EventDescriptor descriptor) { + if (descriptor == null) { + return GENERATION_UNDEFINED; + } else { + return descriptor.getGeneration(); + } + } + + /** + * Get the hash of a descriptor, handle null appropriately. + */ + @Nullable + private static Hash getHash(@Nullable final EventDescriptor descriptor) { + if (descriptor == null) { + return null; + } else { + return descriptor.getHash(); + } + } + + /** + * Get the creator of a descriptor, handle null appropriately. 
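Note: assembleEventObject() above delegates the creation timestamp to EventUtils.calculateNewEventCreationTime. The sketch below is only one plausible version of such a rule, assuming the child event must not precede the previous self event and must leave one nanosecond per prior transaction so that every transaction can receive a distinct consensus timestamp; the platform's actual implementation may differ in detail.

    import java.time.Instant;

    // Hedged sketch, not the platform's EventUtils: pick a creation time that is "now" unless
    // that would collide with timestamps reserved for the previous event's transactions.
    final class CreationTimeExample {
        static Instant newEventCreationTime(
                final Instant now, final Instant previousEventTime, final int previousTransactionCount) {
            // Reserve at least one nanosecond per prior transaction (and at least one overall).
            final long minimumIncrementNanos = Math.max(1, previousTransactionCount);
            final Instant lowerBound = previousEventTime.plusNanos(minimumIncrementNanos);
            return now.isAfter(lowerBound) ? now : lowerBound;
        }
    }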
+ */ + @Nullable + private static NodeId getCreator(@Nullable final EventDescriptor descriptor) { + if (descriptor == null) { + return CREATOR_ID_UNDEFINED; + } else { + return descriptor.getCreator(); + } + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetMetrics.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetMetrics.java new file mode 100644 index 000000000000..84f58e2cbeb4 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetMetrics.java @@ -0,0 +1,126 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.metrics.Metrics; +import com.swirlds.common.metrics.RunningAverageMetric; +import com.swirlds.common.metrics.SpeedometerMetric; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.Address; +import com.swirlds.common.system.address.AddressBook; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.HashMap; +import java.util.Map; + +/** + * Encapsulates metrics for the tipset event creator. + */ +public class TipsetMetrics { + + private static final RunningAverageMetric.Config TIPSET_ADVANCEMENT_CONFIG = new RunningAverageMetric.Config( + "platform", "tipsetAdvancement") + .withDescription("The score, based on tipset advancement weight, of each new event created by this " + + "node. A score of 0.0 means the an event has zero advancement weight, while a score " + + "of 1.0 means that the event had the maximum possible advancement weight."); + private final RunningAverageMetric tipsetAdvancementMetric; + + private static final RunningAverageMetric.Config BULLY_SCORE_CONFIG = new RunningAverageMetric.Config( + "platform", "bullyScore") + .withDescription("The score, based on tipset advancements, of how much of a 'bully' " + + "this node is being to other nodes. Bullying is defined as refusing to use a " + + "node's events as other parents."); + private final RunningAverageMetric bullyScoreMetric; + + private final Map tipsetParentMetrics = new HashMap<>(); + private final Map pityParentMetrics = new HashMap<>(); + + /** + * Create metrics for the tipset event creator. 
+ * + * @param platformContext the platform context + */ + public TipsetMetrics(@NonNull final PlatformContext platformContext, @NonNull final AddressBook addressBook) { + + final Metrics metrics = platformContext.getMetrics(); + tipsetAdvancementMetric = metrics.getOrCreate(TIPSET_ADVANCEMENT_CONFIG); + bullyScoreMetric = metrics.getOrCreate(BULLY_SCORE_CONFIG); + + for (final Address address : addressBook) { + final NodeId nodeId = address.getNodeId(); + + final SpeedometerMetric.Config parentConfig = new SpeedometerMetric.Config( + "platform", "tipsetParent" + nodeId.id()) + .withDescription("Cycled when an event from that node is used as a " + + "parent because it optimized the tipset advancement weight."); + final SpeedometerMetric parentMetric = metrics.getOrCreate(parentConfig); + tipsetParentMetrics.put(nodeId, parentMetric); + + final SpeedometerMetric.Config pityParentConfig = new SpeedometerMetric.Config( + "platform", "pityParent" + nodeId.id()) + .withDescription("Cycled when an event from that node is used as a " + + "parent without consideration of tipset advancement weight optimization " + + "(i.e. taking 'pity' on a node that isn't getting its events chosen as parents)."); + final SpeedometerMetric pityParentMetric = metrics.getOrCreate(pityParentConfig); + pityParentMetrics.put(nodeId, pityParentMetric); + } + } + + /** + * Get the metric used to track the tipset score of events created by this node. + * + * @return the tipset advancement metric + */ + @NonNull + public RunningAverageMetric getTipsetAdvancementMetric() { + return tipsetAdvancementMetric; + } + + /** + * Get the metric used to track the bully score of this node. + * + * @return the bully score metric + */ + @NonNull + public RunningAverageMetric getBullyScoreMetric() { + return bullyScoreMetric; + } + + /** + * Get the metric used to track the number of times this node has used an event from the given node as a parent + * because it optimized the tipset score. + * + * @param nodeId the node ID + * @return the parent metric + */ + @NonNull + public SpeedometerMetric getTipsetParentMetric(@NonNull final NodeId nodeId) { + return tipsetParentMetrics.get(nodeId); + } + + /** + * Get the metric used to track the number of times this node has used an event from the given node as a parent + * without consideration of tipset advancement weight optimization. + * + * @param nodeId the node ID + * @return the pity parent metric + */ + @NonNull + public SpeedometerMetric getPityParentMetric(@NonNull final NodeId nodeId) { + return pityParentMetrics.get(nodeId); + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetTracker.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetTracker.java new file mode 100644 index 000000000000..be8a5a0817a0 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetTracker.java @@ -0,0 +1,135 @@ +/* + * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import static com.swirlds.platform.event.tipset.Tipset.merge; + +import com.swirlds.common.sequence.map.SequenceMap; +import com.swirlds.common.sequence.map.StandardSequenceMap; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.platform.event.EventDescriptor; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * Computes and tracks tipsets for non-ancient events. + */ +public class TipsetTracker { + + private static final int INITIAL_TIPSET_MAP_CAPACITY = 64; + + /** + * Tipsets for all non-ancient events we know about. + */ + private final SequenceMap tipsets; + + /** + * This tipset is equivalent to a tipset that would be created by merging all tipsets of all events that this object + * has ever observed. If you ask this tipset for the generation for a particular node, it will return the highest + * generation of all events we have ever received from that node. + */ + private Tipset latestGenerations; + + private final AddressBook addressBook; + + /** + * Create a new tipset tracker. + * + * @param addressBook the current address book + */ + public TipsetTracker(@NonNull final AddressBook addressBook) { + + this.addressBook = Objects.requireNonNull(addressBook); + + this.latestGenerations = new Tipset(addressBook); + + tipsets = new StandardSequenceMap<>(0, INITIAL_TIPSET_MAP_CAPACITY, true, EventDescriptor::getGeneration); + } + + /** + * Set the minimum generation that is not considered ancient. + * + * @param minimumGenerationNonAncient the minimum non-ancient generation, all lower generations are ancient + */ + public void setMinimumGenerationNonAncient(final long minimumGenerationNonAncient) { + tipsets.shiftWindow(minimumGenerationNonAncient); + } + + /** + * Add a new event to the tracker. + * + * @param eventDescriptor the descriptor of the event to add + * @param parents the parents of the event being added + * @return the tipset for the event that was added + */ + @NonNull + public Tipset addEvent( + @NonNull final EventDescriptor eventDescriptor, @NonNull final List parents) { + final List parentTipsets = new ArrayList<>(parents.size()); + for (final EventDescriptor parent : parents) { + final Tipset parentTipset = tipsets.get(parent); + if (parentTipset != null) { + parentTipsets.add(parentTipset); + } + } + + final Tipset eventTipset; + if (parentTipsets.isEmpty()) { + eventTipset = + new Tipset(addressBook).advance(eventDescriptor.getCreator(), eventDescriptor.getGeneration()); + } else { + eventTipset = merge(parentTipsets).advance(eventDescriptor.getCreator(), eventDescriptor.getGeneration()); + } + + tipsets.put(eventDescriptor, eventTipset); + latestGenerations = latestGenerations.advance(eventDescriptor.getCreator(), eventDescriptor.getGeneration()); + + return eventTipset; + } + + /** + * Get the tipset of an event, or null if the event is not being tracked. 
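Note: addEvent() above derives a child's tipset by merging the parents' tipsets (element-wise maximum) and then advancing the child creator's own entry. A compact standalone illustration using plain arrays:

    import java.util.Arrays;
    import java.util.List;

    // Illustrative only: compute a child's generation vector from its parents' vectors.
    final class ChildTipsetExample {
        static long[] childTipset(
                final List<long[]> parentTipsets, final int creatorIndex, final long childGeneration, final int nodeCount) {
            final long[] child = new long[nodeCount];
            Arrays.fill(child, -1); // start from the empty tipset
            for (final long[] parent : parentTipsets) {
                for (int i = 0; i < nodeCount; i++) {
                    child[i] = Math.max(child[i], parent[i]); // merge = element-wise maximum
                }
            }
            child[creatorIndex] = Math.max(child[creatorIndex], childGeneration); // advance the child's own tip
            return child;
        }
    }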
+ * + * @param eventDescriptor the fingerprint of the event + * @return the tipset of the event, or null if the event is not being tracked + */ + @Nullable + public Tipset getTipset(@NonNull final EventDescriptor eventDescriptor) { + return tipsets.get(eventDescriptor); + } + + /** + * Get the highest generation of all events we have received from a particular node. + * + * @param nodeId the node in question + * @return the highest generation of all events received by a given node + */ + public long getLatestGenerationForNode(@NonNull final NodeId nodeId) { + return latestGenerations.getTipGenerationForNode(nodeId); + } + + /** + * Get number of tipsets being tracked. + */ + public int size() { + return tipsets.getSize(); + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetUtils.java new file mode 100644 index 000000000000..27bb89d31e9c --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetUtils.java @@ -0,0 +1,101 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import com.swirlds.platform.event.EventDescriptor; +import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.List; + +/** + * Misc tipset utilities. + */ +public final class TipsetUtils { + + private TipsetUtils() {} + + /** + * Build a descriptor from an EventImpl. + * + * @param event the event + * @return the descriptor + */ + public static EventDescriptor buildDescriptor(@NonNull final EventImpl event) { + if (event.getBaseHash() == null) { + throw new IllegalStateException("event is not hashed"); + } + return new EventDescriptor(event.getBaseHash(), event.getCreatorId(), event.getGeneration()); + } + + /** + * Build a descriptor from a GossipEvent. + * + * @param event the event + * @return the descriptor + */ + public static EventDescriptor buildDescriptor(@NonNull final GossipEvent event) { + event.buildDescriptor(); + return event.getDescriptor(); + } + + /** + * Get the descriptors of an event's parents. + * + * @param event the event to get parent descriptors for + */ + @NonNull + public static List getParentDescriptors(@NonNull final EventImpl event) { + final List parentDescriptors = new ArrayList<>(2); + if (event.getSelfParent() != null) { + parentDescriptors.add(buildDescriptor(event.getSelfParent())); + } + if (event.getOtherParent() != null) { + parentDescriptors.add(buildDescriptor(event.getOtherParent())); + } + return parentDescriptors; + } + + /** + * Get the descriptors of an event's parents. 
+ * + * @param event the event to the parent descriptors for + * @return a list of parent descriptors + */ + @NonNull + public static List getParentDescriptors(@NonNull final GossipEvent event) { + final List parentDescriptors = new ArrayList<>(2); + + if (event.getHashedData().getSelfParentHash() != null) { + final EventDescriptor parent = new EventDescriptor( + event.getHashedData().getSelfParentHash(), + event.getHashedData().getCreatorId(), + event.getHashedData().getSelfParentGen()); + parentDescriptors.add(parent); + } + if (event.getHashedData().getOtherParentHash() != null) { + final EventDescriptor parent = new EventDescriptor( + event.getHashedData().getOtherParentHash(), + event.getUnhashedData().getOtherId(), + event.getHashedData().getOtherParentGen()); + parentDescriptors.add(parent); + } + + return parentDescriptors; + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetWeightCalculator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetWeightCalculator.java new file mode 100644 index 000000000000..e3e2fda2c9c2 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/TipsetWeightCalculator.java @@ -0,0 +1,287 @@ +/* + * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset; + +import static com.swirlds.platform.Utilities.isSuperMajority; +import static com.swirlds.platform.event.tipset.TipsetAdvancementWeight.ZERO_ADVANCEMENT_WEIGHT; + +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.platform.event.EventDescriptor; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.Deque; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +/** + * Calculates tipset advancement weights for events created by a node. + */ +public class TipsetWeightCalculator { + + /** + * The node ID that is being tracked by this object. + */ + private final NodeId selfId; + + /** + * Builds tipsets for each event. Is maintained outside this object. + */ + private final TipsetTracker tipsetTracker; + + /** + * Tracks non-ancient events without children. + */ + private final ChildlessEventTracker childlessEventTracker; + + /** + * The current tipset snapshot. This is updated to the latest self event's tipset whenever the weighted advancement + * between the current snapshot and the new event's tipset exceeds the threshold of 2/3 consensus weight minus the + * self weight. + */ + private Tipset snapshot; + + /** + * The N most recent snapshots. + */ + private final Deque snapshotHistory = new LinkedList<>(); + + /** + * The number of snapshots to keep in {@link #snapshotHistory}. 
+ */ + private final int maxSnapshotHistorySize; + + /** + * The total weight of all nodes. + */ + private final long totalWeight; + + /** + * The weight of the node tracked by this object. + */ + private final long selfWeight; + + /** + * The maximum possible advancement weight for an event. + */ + private final long maximumPossibleAdvancementWeight; + + /** + * The previous tipset advancement weight. + */ + private TipsetAdvancementWeight previousAdvancementWeight = ZERO_ADVANCEMENT_WEIGHT; + + /** + * The tipset of the latest self event, or the starting snapshot if there has not yet been a self event. + */ + private Tipset latestSelfEventTipset; + + /** + * Create a new tipset weight calculator. + * + * @param platformContext the platform context + * @param addressBook the current address book + * @param selfId the ID of the node tracked by this object + * @param tipsetTracker builds tipsets for individual events + * @param childlessEventTracker tracks non-ancient events without children + */ + public TipsetWeightCalculator( + @NonNull final PlatformContext platformContext, + @NonNull final AddressBook addressBook, + @NonNull final NodeId selfId, + @NonNull final TipsetTracker tipsetTracker, + @NonNull final ChildlessEventTracker childlessEventTracker) { + + this.selfId = Objects.requireNonNull(selfId); + this.tipsetTracker = Objects.requireNonNull(tipsetTracker); + this.childlessEventTracker = Objects.requireNonNull(childlessEventTracker); + Objects.requireNonNull(addressBook); + + totalWeight = addressBook.getTotalWeight(); + selfWeight = addressBook.getAddress(selfId).getWeight(); + maximumPossibleAdvancementWeight = totalWeight - selfWeight; + maxSnapshotHistorySize = platformContext + .getConfiguration() + .getConfigData(EventCreationConfig.class) + .tipsetSnapshotHistorySize(); + + snapshot = new Tipset(addressBook); + latestSelfEventTipset = snapshot; + snapshotHistory.add(snapshot); + } + + /** + * Get the maximum possible tipset advancement weight that a new event can achieve. + */ + public long getMaximumPossibleAdvancementWeight() { + return maximumPossibleAdvancementWeight; + } + + /** + * Get the current tipset snapshot. + * + * @return the current tipset snapshot + */ + public @NonNull Tipset getSnapshot() { + return snapshot; + } + + /** + * Add an event created by this node and compute the increase in tipset advancement weight. Higher weight changes + * mean that this event will cause consensus to advance more. An advancement weight change of 0 means that this + * event did not advance consensus. An advancement weight change close to the total weight means that this event + * will do a very good job at advancing consensus. It's impossible to get a perfect advancement weight, since + * the weight of advancing self events is not included. The maximum advancement weight an event can achieve is equal + * to the sum of all weights minus this node's weight. + *
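// Editor's note: a hypothetical worked example of the quantities described above (numbers invented for
// illustration, not taken from the change). With four nodes of weight 25 each and this node being one of
// them: totalWeight = 100, selfWeight = 25, and maximumPossibleAdvancementWeight = 100 - 25 = 75. The
// snapshot (see the next paragraph) advances once advancementWeight + selfWeight is a super-majority,
// i.e. strictly more than 2/3 of totalWeight: 41 + 25 = 66 falls short, while 42 + 25 = 67 crosses it.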

+ * Whenever the total advancement weight of a new event exceeds the threshold (2/3 minus self weight), the snapshot + * is set to be equal to this event's tipset. + * + * @param event the event that is being added + * @return the change in this event's tipset advancement weight compared to the tipset advancement weight of the + * previous event passed to this method + */ + public TipsetAdvancementWeight addEventAndGetAdvancementWeight(@NonNull final EventDescriptor event) { + Objects.requireNonNull(event); + if (!event.getCreator().equals(selfId)) { + throw new IllegalArgumentException("event creator must be the same as self ID"); + } + + final Tipset eventTipset = tipsetTracker.getTipset(event); + if (eventTipset == null) { + throw new IllegalArgumentException("event " + event + " is not in the tipset tracker"); + } + + final TipsetAdvancementWeight advancementWeight = snapshot.getTipAdvancementWeight(selfId, eventTipset); + if (advancementWeight.advancementWeight() > maximumPossibleAdvancementWeight) { + throw new IllegalStateException("advancement weight " + advancementWeight + + " is greater than the maximum possible weight " + maximumPossibleAdvancementWeight); + } + + final TipsetAdvancementWeight advancementWeightImprovement = advancementWeight.minus(previousAdvancementWeight); + + if (isSuperMajority(advancementWeight.advancementWeight() + selfWeight, totalWeight)) { + snapshot = eventTipset; + snapshotHistory.add(snapshot); + if (snapshotHistory.size() > maxSnapshotHistorySize) { + snapshotHistory.remove(); + } + previousAdvancementWeight = ZERO_ADVANCEMENT_WEIGHT; + } else { + previousAdvancementWeight = advancementWeight; + } + + latestSelfEventTipset = eventTipset; + + return advancementWeightImprovement; + } + + /** + * Figure out what advancement weight we would get if we created an event with a given list of parents. + * + * @param parents the proposed parents of an event + * @return the advancement weight we would get by creating an event with the given parents + */ + public TipsetAdvancementWeight getTheoreticalAdvancementWeight(@NonNull final List parents) { + if (parents.isEmpty()) { + return ZERO_ADVANCEMENT_WEIGHT; + } + + final List parentTipsets = new ArrayList<>(parents.size()); + for (final EventDescriptor parent : parents) { + parentTipsets.add(tipsetTracker.getTipset(parent)); + } + + // Don't bother advancing the self generation in this theoretical tipset, + // since self advancement doesn't contribute to tipset advancement weight. + final Tipset newTipset = Tipset.merge(parentTipsets); + + return snapshot.getTipAdvancementWeight(selfId, newTipset).minus(previousAdvancementWeight); + } + + /** + * Compute the current maximum bully score with respect to all nodes. This is a measure of how well slow nodes' + * events are being incorporated in the hashgraph by faster nodes. A high score means slow nodes are being bullied + * by fast nodes. A low score means slow nodes are being included in consensus. Lower scores are better. + * + * @return the current tipset bully score + */ + public int getMaxBullyScore() { + int bullyScore = 0; + for (final EventDescriptor eventDescriptor : childlessEventTracker.getChildlessEvents()) { + bullyScore = Math.max(bullyScore, getBullyScoreForNode(eventDescriptor.getCreator())); + } + return bullyScore; + } + + /** + * Get the bully score with respect to one node, i.e. how much this node is bullying the specified node. 
A high + bully score means that we have access to events that could go into our ancestry, but for whatever reason we have + decided not to put them into our ancestry. + *

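// Editor's toy illustration of the definition given just below (hypothetical, not part of the change):
// suppose the snapshot has advanced three times since our latest self event, and none of those advances
// increased node X's recorded generation even though X has newer events available as other parents;
// getBullyScoreForNode(X) would then report 3. Had one of those advances included an event from X, the
// count would stop at that advance.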
+ * The bully score is defined as the number of times the snapshot has been advanced without updating the generation + * of a particular node. For nodes that do not have any events that are legal other parents, the bully score is + * defined to be 0, regardless of how many times the snapshot has been advanced. + * + * @param nodeId the node to compute the bully score for + * @return the bully score with respect to this node + */ + public int getBullyScoreForNode(@NonNull final NodeId nodeId) { + if (latestSelfEventTipset == null) { + // We can't be a bully if we haven't created any events yet. + return 0; + } + + if (latestSelfEventTipset.getTipGenerationForNode(nodeId) + > snapshotHistory.getLast().getTipGenerationForNode(nodeId)) { + // Special case: we have advanced this generation since the snapshot was taken. + return 0; + } + + int bullyScore = 0; + final long latestGeneration = tipsetTracker.getLatestGenerationForNode(nodeId); + + // Iterate backwards in time until we find an event from the node being added to our ancestry, or if + // we find that there are no eligible nodes to be added to our ancestry. + final Iterator iterator = snapshotHistory.descendingIterator(); + Tipset previousTipset = iterator.next(); + while (iterator.hasNext()) { + final Tipset currentTipset = previousTipset; + previousTipset = iterator.next(); + + final long previousGeneration = previousTipset.getTipGenerationForNode(nodeId); + final long currentGeneration = currentTipset.getTipGenerationForNode(nodeId); + + if (currentGeneration == latestGeneration || previousGeneration < currentGeneration) { + // We stop increasing the bully score if we observe one of the two following events: + // + // 1) we find that the latest generation provided by a node matches a snapshot's generation + // (i.e. we've used all events provided by this creator as other parents) + // 2) we observe an advancement between snapshots, which means that we have put one of this node's + // events into our ancestry. + break; + } + + bullyScore++; + } + + return bullyScore; + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/AggregateTipsetEventCreationRules.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/AggregateTipsetEventCreationRules.java new file mode 100644 index 000000000000..4bcfaafd8365 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/AggregateTipsetEventCreationRules.java @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset.rules; + +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * Combines multiple {@link TipsetEventCreationRule} objects into a single object. Allows event creation if all the + * contained limiters allow event creation. 
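// Editor's sketch, not part of the change set: composing several rules with the static factory shown
// below. The aggregate permits event creation only if every constituent rule permits it; the individual
// rule instances here are assumed to have been constructed elsewhere.
final TipsetEventCreationRule eventCreationRules = AggregateTipsetEventCreationRules.of(
        backpressureRule, maximumRateRule, platformStatusRule);
if (eventCreationRules.isEventCreationPermitted()) {
    // ... create and submit the new event ...
    eventCreationRules.eventWasCreated();
}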
+ */ +public class AggregateTipsetEventCreationRules implements TipsetEventCreationRule { + + private final TipsetEventCreationRule[] rules; + + /** + * Create a new {@link AggregateTipsetEventCreationRules} from the given list of rules. + * + * @param rules the rules to combine, if no rules are provided then event creation is always permitted. + * @return an aggregate rule that permits event creation if and only if all rules permit creation. + */ + public static AggregateTipsetEventCreationRules of(@Nullable final TipsetEventCreationRule... rules) { + return new AggregateTipsetEventCreationRules(rules); + } + + /** + * Constructor. + * + * @param rules the limiters to combine + */ + private AggregateTipsetEventCreationRules(@Nullable final TipsetEventCreationRule... rules) { + this.rules = rules == null ? new TipsetEventCreationRule[0] : rules; + } + + /** + * {@inheritDoc} + */ + @Override + public boolean isEventCreationPermitted() { + for (final TipsetEventCreationRule limiter : rules) { + if (!limiter.isEventCreationPermitted()) { + return false; + } + } + + return true; + } + + /** + * {@inheritDoc} + */ + @Override + public void eventWasCreated() { + for (final TipsetEventCreationRule limiter : rules) { + limiter.eventWasCreated(); + } + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetBackpressureRule.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetBackpressureRule.java new file mode 100644 index 000000000000..9cb010806bbd --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetBackpressureRule.java @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset.rules; + +import com.swirlds.common.context.PlatformContext; +import com.swirlds.platform.event.tipset.EventCreationConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.function.IntSupplier; + +/** + * Prevents event creations when the system is stressed and unable to keep up with its work load. + */ +public class TipsetBackpressureRule implements TipsetEventCreationRule { + + /** + * Prevent new events from being created if the event intake queue ever meets or exceeds this size. + */ + private final int eventIntakeThrottle; + + private final IntSupplier eventIntakeQueueSize; + + /** + * Constructor. 
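// Editor's sketch, not part of the change set: constructing the backpressure rule via the constructor
// documented below. `platformContext` is assumed to be available, and the IntSupplier is assumed to report
// the current depth of the event intake queue (a hypothetical `intakeQueue` here).
final TipsetEventCreationRule backpressureRule =
        new TipsetBackpressureRule(platformContext, intakeQueue::size);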
+ * + * @param platformContext the platform's context + * @param eventIntakeQueueSize provides the size of the event intake queue + */ + public TipsetBackpressureRule( + @NonNull final PlatformContext platformContext, @NonNull final IntSupplier eventIntakeQueueSize) { + + final EventCreationConfig eventCreationConfig = + platformContext.getConfiguration().getConfigData(EventCreationConfig.class); + + eventIntakeThrottle = eventCreationConfig.eventIntakeThrottle(); + + this.eventIntakeQueueSize = Objects.requireNonNull(eventIntakeQueueSize); + } + + /** + * {@inheritDoc} + */ + @Override + public boolean isEventCreationPermitted() { + return eventIntakeQueueSize.getAsInt() < eventIntakeThrottle; + } + + /** + * {@inheritDoc} + */ + @Override + public void eventWasCreated() { + // no-op + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetEventCreationRule.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetEventCreationRule.java new file mode 100644 index 000000000000..7d7ce6f8b29f --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetEventCreationRule.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset.rules; + +/** + * An object used to limit or prevent the creation of new events. + */ +public interface TipsetEventCreationRule { + + /** + * Check if event creation is currently permitted. + * + * @return true if event creation is permitted, false otherwise + */ + boolean isEventCreationPermitted(); + + /** + * This method is called whenever an event is created. + */ + void eventWasCreated(); +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetMaximumRateRule.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetMaximumRateRule.java new file mode 100644 index 000000000000..2e80b8505f97 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetMaximumRateRule.java @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.swirlds.platform.event.tipset.rules; + +import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.utility.throttle.RateLimiter; +import com.swirlds.platform.event.tipset.EventCreationConfig; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * Throttles event creation rate over time. + */ +public class TipsetMaximumRateRule implements TipsetEventCreationRule { + + private final RateLimiter rateLimiter; + + /** + * Constructor. + * + * @param platformContext the platform context for this node + * @param time provides wall clock time + */ + public TipsetMaximumRateRule(@NonNull final PlatformContext platformContext, @NonNull final Time time) { + + final EventCreationConfig eventCreationConfig = + platformContext.getConfiguration().getConfigData(EventCreationConfig.class); + + final double maxCreationRate = eventCreationConfig.maxCreationRate(); + if (maxCreationRate > 0) { + rateLimiter = new RateLimiter(time, maxCreationRate); + } else { + // No brakes! + rateLimiter = null; + } + } + + /** + * {@inheritDoc} + */ + @Override + public boolean isEventCreationPermitted() { + if (rateLimiter != null) { + return rateLimiter.request(); + } + return true; + } + + /** + * {@inheritDoc} + */ + @Override + public void eventWasCreated() { + if (rateLimiter != null) { + rateLimiter.trigger(); + } + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetPlatformStatusRule.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetPlatformStatusRule.java new file mode 100644 index 000000000000..4cf71049f764 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/tipset/rules/TipsetPlatformStatusRule.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.event.tipset.rules; + +import com.swirlds.common.system.EventCreationRuleResponse; +import com.swirlds.common.system.status.PlatformStatus; +import com.swirlds.platform.StartUpEventFrozenManager; +import com.swirlds.platform.eventhandling.EventTransactionPool; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.function.Supplier; + +/** + * Limits the creation of new events depending on the current platform status. + */ +public class TipsetPlatformStatusRule implements TipsetEventCreationRule { + + private final Supplier platformStatusSupplier; + private final EventTransactionPool transactionPool; + + // Note: this will eventually be handled by platform statuses + private final StartUpEventFrozenManager startUpEventFrozenManager; + + /** + * Constructor. 
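// Editor's sketch, not part of the change set: constructing the status rule via the constructor documented
// below. The status supplier, transaction pool, and start-up frozen manager are assumed to be provided by
// the surrounding platform wiring.
final TipsetEventCreationRule platformStatusRule =
        new TipsetPlatformStatusRule(statusSupplier, transactionPool, startUpEventFrozenManager);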
+ * + * @param platformStatusSupplier provides the current platform status + * @param startUpEventFrozenManager tells us if we are in the "start up frozen" phase + * @param transactionPool provides transactions to be added to new events + */ + public TipsetPlatformStatusRule( + @NonNull final Supplier platformStatusSupplier, + @NonNull final EventTransactionPool transactionPool, + @NonNull final StartUpEventFrozenManager startUpEventFrozenManager) { + + this.platformStatusSupplier = Objects.requireNonNull(platformStatusSupplier); + this.startUpEventFrozenManager = Objects.requireNonNull(startUpEventFrozenManager); + this.transactionPool = Objects.requireNonNull(transactionPool); + } + + /** + * {@inheritDoc} + */ + @Override + public boolean isEventCreationPermitted() { + final PlatformStatus currentStatus = platformStatusSupplier.get(); + + if (startUpEventFrozenManager.shouldCreateEvent() == EventCreationRuleResponse.DONT_CREATE) { + // Eventually this behavior will be enforced using platform statuses + return false; + } + + if (currentStatus == PlatformStatus.FREEZING) { + return transactionPool.numSignatureTransEvent() > 0; + } + + if (currentStatus != PlatformStatus.ACTIVE && currentStatus != PlatformStatus.CHECKING) { + return false; + } + + return true; + } + + /** + * {@inheritDoc} + */ + @Override + public void eventWasCreated() { + // no-op + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/EventDeduplication.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/EventDeduplication.java index f1db5eda23e0..b7457293c9c0 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/EventDeduplication.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/EventDeduplication.java @@ -16,8 +16,8 @@ package com.swirlds.platform.event.validation; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.event.GossipEvent; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import com.swirlds.platform.metrics.EventIntakeMetrics; import java.util.List; import java.util.function.Predicate; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java index a856f43aedef..7ec711699fef 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java @@ -37,7 +37,6 @@ import com.swirlds.platform.Crypto; import com.swirlds.platform.FreezeManager; import com.swirlds.platform.PlatformConstructor; -import com.swirlds.platform.Settings; import com.swirlds.platform.StartUpEventFrozenManager; import com.swirlds.platform.components.CriticalQuorum; import com.swirlds.platform.components.EventCreationRules; @@ -91,7 +90,6 @@ public abstract class AbstractGossip implements ConnectionTracker, Gossip { protected final AddressBook addressBook; protected final NodeId selfId; protected final NetworkTopology topology; - protected final Settings settings = Settings.getInstance(); protected final CriticalQuorum criticalQuorum; protected final NetworkMetrics networkMetrics; protected final SyncMetrics syncMetrics; diff --git 
a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java index b4696732a64f..990dd7974a97 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java @@ -306,6 +306,7 @@ public ChatterGossip( new AncientParentsRule(consensusRef::get), criticalQuorum)); final ChatterEventCreator chatterEventCreator = new ChatterEventCreator( + platformContext, appVersion, selfId, PlatformConstructor.platformSigner(crypto.getKeysAndCerts()), diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java index e2b6fa0fbf3d..7014e7a9b0e0 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java @@ -22,6 +22,7 @@ import com.swirlds.common.metrics.extensions.CountPerSecond; import com.swirlds.common.sequence.Shiftable; import com.swirlds.common.system.NodeId; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.gossip.chatter.config.ChatterConfig; import com.swirlds.platform.gossip.chatter.protocol.heartbeat.HeartbeatMessage; import com.swirlds.platform.gossip.chatter.protocol.heartbeat.HeartbeatSendReceive; @@ -29,7 +30,6 @@ import com.swirlds.platform.gossip.chatter.protocol.input.InputDelegateBuilder; import com.swirlds.platform.gossip.chatter.protocol.input.MessageTypeHandlerBuilder; import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEvent; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import com.swirlds.platform.gossip.chatter.protocol.output.MessageOutput; import com.swirlds.platform.gossip.chatter.protocol.output.OtherEventDelay; import com.swirlds.platform.gossip.chatter.protocol.output.PriorityOutputAggregator; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/ChatterEvent.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/ChatterEvent.java index ee1df53e3404..65afd48d2ba8 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/ChatterEvent.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/ChatterEvent.java @@ -17,6 +17,7 @@ package com.swirlds.platform.gossip.chatter.protocol.messages; import com.swirlds.common.io.SelfSerializable; +import com.swirlds.platform.event.EventDescriptor; import java.time.Instant; /** @@ -41,7 +42,5 @@ public interface ChatterEvent extends SelfSerializable { * * @return the generation of the event */ - default long getGeneration() { - return getDescriptor().getGeneration(); - } + long getGeneration(); } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/EventDescriptor.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/EventDescriptor.java deleted file mode 100644 
index 511e7b9cbc45..000000000000 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/messages/EventDescriptor.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (C) 2016-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.swirlds.platform.gossip.chatter.protocol.messages; - -import com.swirlds.common.crypto.Hash; -import com.swirlds.common.io.SelfSerializable; -import com.swirlds.common.system.NodeId; -import edu.umd.cs.findbugs.annotations.NonNull; - -/** - * A stripped down description of an event. - */ -public interface EventDescriptor extends SelfSerializable { - - /** - * Get the hash of the event. - * - * @return the event's hash - */ - Hash getHash(); - - /** - * Get the node ID of the event's creator. - * - * @return a node ID - */ - @NonNull - NodeId getCreator(); - - /** - * Get the generation of the event described - * - * @return the generation of the event described - */ - long getGeneration(); -} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/peer/PeerGossipState.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/peer/PeerGossipState.java index ffdbe26e4b1d..20742b50d7e7 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/peer/PeerGossipState.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/peer/PeerGossipState.java @@ -20,8 +20,8 @@ import com.swirlds.common.sequence.map.SequenceMap; import com.swirlds.common.sequence.map.StandardSequenceMap; import com.swirlds.platform.consensus.GraphGenerations; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEvent; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import org.apache.commons.lang3.ObjectUtils; /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/EventImpl.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/EventImpl.java index aaa769339dda..3c5305bab880 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/EventImpl.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/EventImpl.java @@ -791,18 +791,18 @@ public void setReachedConsTimestamp(final Instant reachedConsTimestamp) { } /** - * @param m the member ID - * @return last ancestor created by m (memoizes lastSee function from Swirlds-TR-2020-01) + * @param m the index of the member ID + * @return last ancestor created by the member at index m (memoizes lastSee function from Swirlds-TR-2020-01) */ public EventImpl getLastSee(final int m) { return internalEventData.getLastSee(m); } /** - * remember event, the last ancestor created by m (memoizes lastSee function from Swirlds-TR-2020-01) + * remember event, 
the last ancestor created by the member at index m (memoizes lastSee function from Swirlds-TR-2020-01) * - * @param m the member ID of the creator - * @param event the last seen {@link EventImpl} object created by m + * @param m the index of the member ID of the creator + * @param event the last seen {@link EventImpl} object created by member at index m */ public void setLastSee(final int m, final EventImpl event) { internalEventData.setLastSee(m, event); @@ -826,19 +826,19 @@ public int sizeLastSee() { } /** - * @param m the member ID - * @return strongly-seen witness in parent round by m (memoizes stronglySeeP function from Swirlds-TR-2020-01) + * @param m the index of the member ID + * @return strongly-seen witness in parent round by the member at index m (memoizes stronglySeeP function from Swirlds-TR-2020-01) */ public EventImpl getStronglySeeP(final int m) { return internalEventData.getStronglySeeP(m); } /** - * remember event, the strongly-seen witness in parent round by m (memoizes stronglySeeP function from + * remember event, the strongly-seen witness in parent round by the member at index m (memoizes stronglySeeP function from * Swirlds-TR-2020-01) * - * @param m the member ID of the creator - * @param event the strongly-seen witness in parent round created by m + * @param m the index of the member ID of the creator + * @param event the strongly-seen witness in parent round created by the member at index m */ public void setStronglySeeP(final int m, final EventImpl event) { internalEventData.setStronglySeeP(m, event); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocol.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocol.java index b1bcf5b51dfc..257fcf504695 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocol.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocol.java @@ -122,15 +122,7 @@ public boolean shouldAccept() { return false; } - if (!teacherState.get().getState().isInitialized()) { - teacherState.close(); - teacherState = null; - logger.warn( - RECONNECT.getMarker(), - "Rejecting reconnect request from node {} " + "due to lack of an initialized signed state.", - peerId); - return false; - } else if (!teacherState.get().isComplete()) { + if (!teacherState.get().isComplete()) { // this is only possible if signed state manager violates its contractual obligations teacherState.close(); teacherState = null; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java index 4a5f2db07e14..ab83ca15ecaf 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java @@ -97,14 +97,7 @@ public void protocolInitiated(final byte initialByte, final Connection connectio return; } - if (!state.get().getState().isInitialized()) { - ReconnectUtils.denyReconnect(connection); - logger.warn( - RECONNECT.getMarker(), - "Rejecting reconnect request from node {} due to lack of an initialized signed state.", - connection.getOtherId()); - return; - } else if (!state.get().isComplete()) { + if (!state.get().isComplete()) { // 
this is only possible if signed state manager violates its contractual obligations ReconnectUtils.denyReconnect(connection); logger.error( diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java index 3469d4698338..6315024f8dc1 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java @@ -21,31 +21,19 @@ import static com.swirlds.platform.util.BootstrapUtils.loadAppMain; import static com.swirlds.platform.util.BootstrapUtils.setupConstructableRegistry; -import com.swirlds.common.config.ConfigUtils; import com.swirlds.common.config.ConsensusConfig; -import com.swirlds.common.config.singleton.ConfigurationHolder; -import com.swirlds.common.config.sources.LegacyFileConfigSource; -import com.swirlds.common.context.DefaultPlatformContext; import com.swirlds.common.context.PlatformContext; -import com.swirlds.common.crypto.CryptographyHolder; import com.swirlds.common.crypto.Hash; import com.swirlds.common.io.IOIterator; import com.swirlds.common.merkle.crypto.MerkleCryptoFactory; -import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.notification.NotificationEngine; import com.swirlds.common.stream.RunningHashCalculatorForStream; -import com.swirlds.common.system.InitTrigger; -import com.swirlds.common.system.NodeId; -import com.swirlds.common.system.Round; -import com.swirlds.common.system.SwirldDualState; -import com.swirlds.common.system.SwirldMain; -import com.swirlds.common.system.SwirldState; +import com.swirlds.common.system.*; import com.swirlds.common.system.events.ConsensusEvent; import com.swirlds.common.system.state.notifications.NewRecoveredStateListener; import com.swirlds.common.system.state.notifications.NewRecoveredStateNotification; import com.swirlds.common.utility.CompareTo; import com.swirlds.config.api.Configuration; -import com.swirlds.config.api.ConfigurationBuilder; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.recovery.internal.EventStreamRoundIterator; @@ -65,7 +53,6 @@ import java.util.Iterator; import java.util.List; import java.util.Objects; -import java.util.Set; import java.util.concurrent.ExecutionException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -85,6 +72,7 @@ private EventRecoveryWorkflow() {} * Read a signed state from disk and apply events from an event stream on disk. Write the resulting signed state to * disk. 
* + * @param platformContext the platform context * @param signedStateFile the bootstrap signed state file * @param configurationFiles files containing configuration * @param eventStreamDirectory a directory containing the event stream @@ -98,6 +86,7 @@ private EventRecoveryWorkflow() {} * @param loadSigningKeys if true then load the signing keys */ public static void recoverState( + @NonNull final PlatformContext platformContext, @NonNull final Path signedStateFile, @NonNull final List configurationFiles, @NonNull final Path eventStreamDirectory, @@ -108,6 +97,7 @@ public static void recoverState( @NonNull final NodeId selfId, final boolean loadSigningKeys) throws IOException { + Objects.requireNonNull(platformContext); Objects.requireNonNull(signedStateFile, "signedStateFile must not be null"); Objects.requireNonNull(configurationFiles, "configurationFiles must not be null"); Objects.requireNonNull(eventStreamDirectory, "eventStreamDirectory must not be null"); @@ -125,22 +115,8 @@ public static void recoverState( Files.createDirectories(resultingStateDirectory); } - final ConfigurationBuilder configurationBuilder = ConfigurationBuilder.create(); - ConfigUtils.scanAndRegisterAllConfigTypes(configurationBuilder, Set.of("com.swirlds")); - - for (final Path configurationFile : configurationFiles) { - logger.info(STARTUP.getMarker(), "Loading configuration from {}", configurationFile); - configurationBuilder.withSource(new LegacyFileConfigSource(configurationFile)); - } - - final Configuration configuration = configurationBuilder.build(); - ConfigurationHolder.getInstance().setConfiguration(configuration); - logger.info(STARTUP.getMarker(), "Loading state from {}", signedStateFile); - final PlatformContext platformContext = - new DefaultPlatformContext(configuration, new NoOpMetrics(), CryptographyHolder.get()); - try (final ReservedSignedState initialState = SignedStateFileReader.readStateFile( platformContext, signedStateFile) .reservedSignedState()) { @@ -157,14 +133,7 @@ public static void recoverState( logger.info(STARTUP.getMarker(), "Reapplying transactions"); final ReservedSignedState resultingState = reapplyTransactions( - platformContext, - configuration, - initialState, - appMain, - roundIterator, - finalRound, - selfId, - loadSigningKeys); + platformContext, initialState, appMain, roundIterator, finalRound, selfId, loadSigningKeys); logger.info( STARTUP.getMarker(), @@ -231,7 +200,6 @@ private static void notifyStateRecovered( * Apply transactions on top of a state to produce a new state * * @param platformContext the platform context - * @param configuration the configuration for the node * @param initialState the starting signed state * @param appMain the {@link SwirldMain} for the app. Ignored if null. 
* @param roundIterator an iterator that walks over transactions @@ -245,7 +213,6 @@ private static void notifyStateRecovered( @NonNull public static ReservedSignedState reapplyTransactions( @NonNull final PlatformContext platformContext, - @NonNull final Configuration configuration, @NonNull final ReservedSignedState initialState, @NonNull final SwirldMain appMain, @NonNull final IOIterator roundIterator, @@ -255,12 +222,13 @@ public static ReservedSignedState reapplyTransactions( throws IOException { Objects.requireNonNull(platformContext, "platformContext must not be null"); - Objects.requireNonNull(configuration, "configuration must not be null"); Objects.requireNonNull(initialState, "initialState must not be null"); Objects.requireNonNull(appMain, "appMain must not be null"); Objects.requireNonNull(roundIterator, "roundIterator must not be null"); Objects.requireNonNull(selfId, "selfId must not be null"); + final Configuration configuration = platformContext.getConfiguration(); + final long roundsNonAncient = configuration.getConfigData(ConsensusConfig.class).roundsNonAncient(); @@ -284,7 +252,6 @@ public static ReservedSignedState reapplyTransactions( .getPlatformState() .getPlatformData() .getCreationSoftwareVersion()); - initialState.get().getState().markAsInitialized(); appMain.init(platform, platform.getSelfId()); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/GenesisStateBuilder.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/GenesisStateBuilder.java index 72d8308e4ef1..a1e64159c2d5 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/GenesisStateBuilder.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/GenesisStateBuilder.java @@ -17,15 +17,14 @@ package com.swirlds.platform.state; import com.swirlds.common.config.BasicConfig; -import com.swirlds.common.system.InitTrigger; -import com.swirlds.common.system.Platform; +import com.swirlds.common.context.PlatformContext; import com.swirlds.common.system.SoftwareVersion; import com.swirlds.common.system.SwirldState; import com.swirlds.common.system.address.AddressBook; import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; import java.util.List; -import java.util.function.Supplier; /** * Responsible for building the genesis state. @@ -80,28 +79,24 @@ private static DualStateImpl buildGenesisDualState(final BasicConfig configurati /** * Build and initialize a genesis state. 
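// Editor's sketch, not part of the change set: one plausible call site for the reshaped buildGenesisState()
// shown below. `createGenesisSwirldState()` is a hypothetical helper standing in for however the
// application constructs its genesis SwirldState.
final State genesisState = GenesisStateBuilder.buildGenesisState(
        platformContext, addressBook, appVersion, createGenesisSwirldState());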
* - * @param platform the platform running this node - * @param addressBook the current address book - * @param appVersion the software version of the app - * @param genesisSwirldStateBuilder builds the genesis application state + * @param platformContext the platform context + * @param addressBook the current address book + * @param appVersion the software version of the app + * @param swirldState the application's genesis state * @return a genesis state */ public static State buildGenesisState( - final Platform platform, - final AddressBook addressBook, - final SoftwareVersion appVersion, - final Supplier genesisSwirldStateBuilder) { + @NonNull final PlatformContext platformContext, + @NonNull final AddressBook addressBook, + @NonNull final SoftwareVersion appVersion, + @NonNull final SwirldState swirldState) { - final BasicConfig basicConfig = platform.getContext().getConfiguration().getConfigData(BasicConfig.class); + final BasicConfig basicConfig = platformContext.getConfiguration().getConfigData(BasicConfig.class); final State state = new State(); state.setPlatformState(buildGenesisPlatformState(addressBook, appVersion)); - state.setSwirldState(genesisSwirldStateBuilder.get()); + state.setSwirldState(swirldState); state.setDualState(buildGenesisDualState(basicConfig)); - state.getSwirldState() - .init(platform, state.getSwirldDualState(), InitTrigger.GENESIS, SoftwareVersion.NO_VERSION); - state.markAsInitialized(); - return state; } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformData.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformData.java index 9a53b3c1895f..ca35e4bc50e6 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformData.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformData.java @@ -46,6 +46,11 @@ public class PlatformData extends PartialMerkleLeaf implements MerkleLeaf { private static final long CLASS_ID = 0x1f89d0c43a8c08bdL; + /** + * The round of the genesis state. + */ + public static final long GENESIS_ROUND = 0; + private static final class ClassVersion { public static final int ORIGINAL = 1; public static final int EPOCH_HASH = 2; @@ -58,7 +63,7 @@ private static final class ClassVersion { * (genesis state) has a round of 0 because the first round is round defined as round 1, and the genesis state is * before any transactions are handled. 
*/ - private long round; + private long round = GENESIS_ROUND; /** * how many consensus events have there been throughout all of history, up through the round received that this @@ -133,6 +138,7 @@ private PlatformData(final PlatformData that) { this.minGenInfo = new ArrayList<>(that.minGenInfo); } this.lastTransactionTimestamp = that.lastTransactionTimestamp; + this.creationSoftwareVersion = that.creationSoftwareVersion; this.epochHash = that.epochHash; this.nextEpochHash = that.nextEpochHash; this.roundsNonAncient = that.roundsNonAncient; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java index 046b21c8c805..03e1aeacb615 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java @@ -71,8 +71,6 @@ private static class ChildIndices { */ private final RuntimeObjectRecord registryRecord; - private boolean initialized = false; - public State() { registryRecord = RuntimeObjectRegistry.createRecord(getClass()); } @@ -81,7 +79,6 @@ private State(final State that) { super(that); registryRecord = RuntimeObjectRegistry.createRecord(getClass()); - this.initialized = that.initialized; if (that.getSwirldState() != null) { this.setSwirldState(that.getSwirldState().copy()); @@ -230,20 +227,6 @@ public static void linkParents(final EventImpl[] events) { } } - /** - * Mark this state as having been initialized. - */ - public void markAsInitialized() { - initialized = true; - } - - /** - * Has this state been initialized? - */ - public boolean isInitialized() { - return initialized; - } - /** * {@inheritDoc} */ diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerImpl.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerImpl.java index 8ae8908c7d63..83a9f857b0bf 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerImpl.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerImpl.java @@ -23,6 +23,7 @@ import com.swirlds.common.config.TransactionConfig; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.SoftwareVersion; import com.swirlds.common.system.SwirldState; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.transaction.internal.ConsensusTransactionImpl; @@ -86,6 +87,11 @@ public class SwirldStateManagerImpl implements SwirldStateManager { */ private final PostConsensusSystemTransactionManager postConsensusSystemTransactionManager; + /** + * The current software version. 
+ */ + private final SoftwareVersion softwareVersion; + // Used for creating mock instances in unit testing public SwirldStateManagerImpl() { stats = null; @@ -94,6 +100,7 @@ public SwirldStateManagerImpl() { postConsensusSystemTransactionManager = null; transactionHandler = null; uptimeTracker = null; + softwareVersion = null; } /** @@ -108,6 +115,7 @@ public SwirldStateManagerImpl() { * @param transactionConfig the transaction configuration * @param inFreeze indicates if the system is currently in a freeze * @param state the genesis state + * @param softwareVersion the current software version */ public SwirldStateManagerImpl( @NonNull final PlatformContext platformContext, @@ -118,7 +126,8 @@ public SwirldStateManagerImpl( @NonNull final SwirldStateMetrics swirldStateMetrics, @NonNull final TransactionConfig transactionConfig, @NonNull final BooleanSupplier inFreeze, - @NonNull final State state) { + @NonNull final State state, + @NonNull final SoftwareVersion softwareVersion) { Objects.requireNonNull(platformContext); Objects.requireNonNull(addressBook); @@ -129,6 +138,7 @@ public SwirldStateManagerImpl( Objects.requireNonNull(transactionConfig); Objects.requireNonNull(inFreeze); Objects.requireNonNull(state); + this.softwareVersion = Objects.requireNonNull(softwareVersion); this.transactionPool = new EventTransactionPool(platformContext.getMetrics(), transactionConfig, inFreeze); this.transactionHandler = new TransactionHandler(selfId, stats); @@ -265,7 +275,7 @@ private void initialState(final State state) { } private void fastCopyAndUpdateRefs(final State state) { - final State consState = fastCopy(state, stats); + final State consState = fastCopy(state, stats, softwareVersion); // Set latest immutable first to prevent the newly immutable state from being deleted between setting the // stateRef and the latestImmutableState diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerUtils.java index ff3dba780808..9f372d2d362e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerUtils.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/SwirldStateManagerUtils.java @@ -18,8 +18,11 @@ import static com.swirlds.common.utility.Units.NANOSECONDS_TO_MICROSECONDS; +import com.swirlds.common.system.SoftwareVersion; import com.swirlds.platform.metrics.SwirldStateMetrics; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; +import java.util.Objects; /** * A utility class with useful methods for implementations of {@link SwirldStateManager}. @@ -32,17 +35,23 @@ private SwirldStateManagerUtils() {} /** * Performs a fast copy on a {@link State}. The {@code state} must not be modified during execution of this method. 
* - * @param state - * the state object to fast copy - * @param stats - * object to record stats in + * @param state the state object to fast copy + * @param stats object to record stats in + * @param softwareVersion the current software version * @return the newly created state copy */ - public static State fastCopy(final State state, final SwirldStateMetrics stats) { + public static State fastCopy( + @NonNull final State state, + @NonNull final SwirldStateMetrics stats, + @NonNull final SoftwareVersion softwareVersion) { + + Objects.requireNonNull(softwareVersion); + + final long copyStart = System.nanoTime(); // Create a fast copy final State copy = state.copy(); + copy.getPlatformState().getPlatformData().setCreationSoftwareVersion(softwareVersion); // Increment the reference count because this reference becomes the new value copy.reserve(); @@ -57,10 +66,8 @@ public static State fastCopy(final State state, final SwirldStateMetrics stats) /** * Determines if a {@code timestamp} is in a freeze period according to the provided state. * - * @param timestamp - * the timestamp to check - * @param consensusState - * the state that contains the freeze periods + * @param timestamp the timestamp to check + * @param consensusState the state that contains the freeze periods * @return true is the {@code timestamp} is in a freeze period */ public static boolean isInFreezePeriod(final Instant timestamp, final State consensusState) { diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java index 77531a6da964..f922af3f3261 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java @@ -286,7 +286,7 @@ private void handleSelfIss(final RoundHashValidator roundHashValidator) { final Hash consensusHash = roundHashValidator.getConsensusHash(); final long skipCount = selfIssRateLimiter.getDeniedRequests(); - if (selfIssRateLimiter.request()) { + if (selfIssRateLimiter.requestAndTrigger()) { final StringBuilder sb = new StringBuilder(); sb.append("Invalid State Signature (ISS): this node has the wrong hash for round ") @@ -317,7 +317,7 @@ private void handleCatastrophic(final RoundHashValidator roundHashValidator) { final Hash selfHash = roundHashValidator.getSelfStateHash(); final long skipCount = catastrophicIssRateLimiter.getDeniedRequests(); - if (catastrophicIssRateLimiter.request()) { + if (catastrophicIssRateLimiter.requestAndTrigger()) { final StringBuilder sb = new StringBuilder(); sb.append("Catastrophic Invalid State Signature (ISS)\n"); @@ -341,7 +341,7 @@ private void handleCatastrophic(final RoundHashValidator roundHashValidator) { */ private void handleLackOfData(final RoundHashValidator roundHashValidator) { final long skipCount = lackingSignaturesRateLimiter.getDeniedRequests(); - if (!lackingSignaturesRateLimiter.request()) { + if (!lackingSignaturesRateLimiter.requestAndTrigger()) { return; } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java index f8c5f18dbd7d..717a9888fe4c 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java +++ 
b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java @@ -124,7 +124,7 @@ public void stateHashValidityObserver( haltRequestedConsumer.haltRequested("other node observed with ISS"); halted = true; - } else if (stateConfig.dumpStateOnAnyISS() && issDumpRateLimiter.request()) { + } else if (stateConfig.dumpStateOnAnyISS() && issDumpRateLimiter.requestAndTrigger()) { stateDumpRequestedDispatcher.dispatch(round, ISS_DUMP_CATEGORY, false); } } @@ -157,7 +157,7 @@ public void selfIssObserver( // If we are powering down, always do a state dump. stateDumpRequestedDispatcher.dispatch(round, ISS_DUMP_CATEGORY, true); fatalErrorConsumer.fatalError("Self ISS", null, SystemExitCode.ISS); - } else if (stateConfig.dumpStateOnAnyISS() && issDumpRateLimiter.request()) { + } else if (stateConfig.dumpStateOnAnyISS() && issDumpRateLimiter.requestAndTrigger()) { stateDumpRequestedDispatcher.dispatch(round, ISS_DUMP_CATEGORY, false); } } @@ -235,7 +235,7 @@ public void catastrophicIssObserver(@NonNull final Long round, @NonNull final Ha stateDumpRequestedDispatcher.dispatch(round, ISS_DUMP_CATEGORY, false); haltRequestedConsumer.haltRequested("catastrophic ISS observed"); halted = true; - } else if (stateConfig.dumpStateOnAnyISS() && issDumpRateLimiter.request()) { + } else if (stateConfig.dumpStateOnAnyISS() && issDumpRateLimiter.requestAndTrigger()) { stateDumpRequestedDispatcher.dispatch(round, ISS_DUMP_CATEGORY, stateConfig.automatedSelfIssRecovery()); } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java index 85c54bdec38c..17a5413ae5b0 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java @@ -18,6 +18,7 @@ import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.SIGNED_STATE; +import static com.swirlds.platform.state.PlatformData.GENESIS_ROUND; import static com.swirlds.platform.state.signed.SignedStateHistory.SignedStateAction.CREATION; import static com.swirlds.platform.state.signed.SignedStateHistory.SignedStateAction.RELEASE; import static com.swirlds.platform.state.signed.SignedStateHistory.SignedStateAction.RESERVE; @@ -197,6 +198,15 @@ public long getRound() { return state.getPlatformState().getPlatformData().getRound(); } + /** + * Check if this state is the genesis state. 
+ * + * @return true if this is the genesis state + */ + public boolean isGenesisState() { + return state.getPlatformState().getPlatformData().getRound() == GENESIS_ROUND; + } + /** * {@inheritDoc} */ diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java index f49cac258dcc..2604517df722 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java @@ -20,6 +20,7 @@ import static com.swirlds.common.io.utility.FileUtils.writeAndFlush; import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STATE_TO_DISK; +import static com.swirlds.platform.config.internal.PlatformConfigUtils.writeSettingsUsed; import static com.swirlds.platform.state.signed.SignedStateFileUtils.CURRENT_ADDRESS_BOOK_FILE_NAME; import static com.swirlds.platform.state.signed.SignedStateFileUtils.FILE_VERSION; import static com.swirlds.platform.state.signed.SignedStateFileUtils.HASH_INFO_FILE_NAME; @@ -33,7 +34,6 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.logging.payloads.StateSavedToDiskPayload; -import com.swirlds.platform.Settings; import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.State; import edu.umd.cs.findbugs.annotations.NonNull; @@ -143,8 +143,8 @@ public static void writeSignedStateFilesToDirectory( writeHashInfoFile(directory, signedState.getState()); writeMetadataFile(selfId, directory, signedState); writeEmergencyRecoveryFile(directory, signedState); - Settings.getInstance().writeSettingsUsed(directory); writeStateAddressBookFile(directory, signedState.getAddressBook()); + writeSettingsUsed(directory); } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java index 7da027128f45..120830ba1967 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java @@ -100,7 +100,8 @@ private void checkSignedStates() { return; } - if (CompareTo.isGreaterThan(objectRecord.getAge(time.now()), maxSignedStateAge) && rateLimiter.request()) { + if (CompareTo.isGreaterThan(objectRecord.getAge(time.now()), maxSignedStateAge) + && rateLimiter.requestAndTrigger()) { final SignedStateHistory history = objectRecord.getMetadata(); logger.error(EXCEPTION.getMarker(), "old signed state detected, memory leak probable.\n{}", history); } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeMetrics.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeMetrics.java index 7b8829c5c8a3..6da4f55f78a4 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeMetrics.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeMetrics.java @@ -38,21 +38,11 @@ class UptimeMetrics { private final Metrics metrics; - /** - * A map from node 
to the time since the last consensus event was observed from that node. - */ - private final Map timeSinceLastConsensusEvent = new HashMap<>(); - /** * A map from node to the number of rounds since the last consensus event was observed from that node. */ private final Map roundsSinceLastConsensusEvent = new HashMap<>(); - /** - * A map from node to the time since the last judge was observed from that node. - */ - private final Map timeSinceLastJudge = new HashMap<>(); - /** * A map from node to the number of rounds since the last judge was observed from that node. */ @@ -71,8 +61,6 @@ class UptimeMetrics { .withDescription("The time, in microseconds, required to compute uptime information each round."); private final RunningAverageMetric uptimeComputationTime; - private static final String TIME_SINCE_LAST_CONSENSUS_EVENT = "timeSinceLastConsensusEvent-"; - private static final String TIME_SINCE_LAST_JUDGE = "timeSinceLastJudge-"; private static final String ROUNDS_SINCE_LAST_CONSENSUS_EVENT = "roundsSinceLastConsensusEvent-"; private static final String ROUNDS_SINCE_LAST_JUDGE = "roundsSinceLastJudge-"; @@ -114,12 +102,6 @@ public UptimeMetrics( */ public void addMetricsForNode(@NonNull final NodeId nodeId) { Objects.requireNonNull(nodeId, "nodeId must not be null"); - final RunningAverageMetric.Config timeSinceLastConensusEventConfig = new RunningAverageMetric.Config( - CATEGORY, TIME_SINCE_LAST_CONSENSUS_EVENT + nodeId) - .withUnit("seconds") - .withDescription("The consensus time in seconds since the " - + "last consensus event created by this node was observed"); - timeSinceLastConsensusEvent.put(nodeId, metrics.getOrCreate(timeSinceLastConensusEventConfig)); final RunningAverageMetric.Config roundsSinceLastConensusEventConfig = new RunningAverageMetric.Config( CATEGORY, ROUNDS_SINCE_LAST_CONSENSUS_EVENT + nodeId) @@ -128,13 +110,6 @@ public void addMetricsForNode(@NonNull final NodeId nodeId) { "The number of rounds since the last consensus event created by this node was observed"); roundsSinceLastConsensusEvent.put(nodeId, metrics.getOrCreate(roundsSinceLastConensusEventConfig)); - final RunningAverageMetric.Config timeSinceLastJudgeConfig = new RunningAverageMetric.Config( - CATEGORY, TIME_SINCE_LAST_JUDGE + nodeId) - .withUnit("seconds") - .withDescription( - "The consensus time in seconds since the last judge created by this node was observed"); - timeSinceLastJudge.put(nodeId, metrics.getOrCreate(timeSinceLastJudgeConfig)); - final RunningAverageMetric.Config roundsSinceLastJudgeConfig = new RunningAverageMetric.Config( CATEGORY, ROUNDS_SINCE_LAST_JUDGE + nodeId) .withUnit("rounds") @@ -149,35 +124,14 @@ public void addMetricsForNode(@NonNull final NodeId nodeId) { */ public void removeMetricsForNode(@NonNull final NodeId nodeId) { Objects.requireNonNull(nodeId, "nodeId must not be null"); - timeSinceLastConsensusEvent.remove(nodeId); - metrics.remove(new RunningAverageMetric.Config(CATEGORY, TIME_SINCE_LAST_CONSENSUS_EVENT + nodeId)); roundsSinceLastConsensusEvent.remove(nodeId); metrics.remove(new RunningAverageMetric.Config(CATEGORY, ROUNDS_SINCE_LAST_CONSENSUS_EVENT + nodeId)); - timeSinceLastJudge.remove(nodeId); - metrics.remove(new RunningAverageMetric.Config(CATEGORY, TIME_SINCE_LAST_JUDGE + nodeId)); - roundsSinceLastJudge.remove(nodeId); metrics.remove(new RunningAverageMetric.Config(CATEGORY, ROUNDS_SINCE_LAST_JUDGE + nodeId)); } - /** - * Get the metric that tracks the time since the last consensus event was observed from a node. 
- * - * @param id the id of the node - * @return the metric - * @throws NoSuchElementException if no metric for the node is found - */ - public @NonNull RunningAverageMetric getTimeSinceLastConsensusEventMetric(@NonNull final NodeId id) { - Objects.requireNonNull(id, "id must not be null"); - final RunningAverageMetric metric = timeSinceLastConsensusEvent.get(id); - if (metric == null) { - throw new NoSuchElementException("No metric for node " + id + " found."); - } - return metric; - } - /** * Get the metric that tracks the number of rounds since the last consensus event was observed from a node. * @@ -194,22 +148,6 @@ public void removeMetricsForNode(@NonNull final NodeId nodeId) { return metric; } - /** - * Get the metric that tracks the time since the last judge was observed from a node. - * - * @param id the id of the node - * @return the metric - * @throws NoSuchElementException if no metric for the node is found - */ - public @NonNull RunningAverageMetric getTimeSinceLastJudgeMetric(@NonNull final NodeId id) { - Objects.requireNonNull(id, "id must not be null"); - final RunningAverageMetric metric = timeSinceLastJudge.get(id); - if (metric == null) { - throw new NoSuchElementException("No metric for node " + id + " found."); - } - return metric; - } - /** * Get the metric that tracks the number of rounds since the last judge was observed from a node. * diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java index 9b57b42592e5..3e6db8605b76 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java @@ -18,9 +18,7 @@ import static com.swirlds.common.system.UptimeData.NO_ROUND; import static com.swirlds.common.units.TimeUnit.UNIT_MICROSECONDS; -import static com.swirlds.common.units.TimeUnit.UNIT_MILLISECONDS; import static com.swirlds.common.units.TimeUnit.UNIT_NANOSECONDS; -import static com.swirlds.common.units.TimeUnit.UNIT_SECONDS; import com.swirlds.base.time.Time; import com.swirlds.common.context.PlatformContext; @@ -223,9 +221,6 @@ private void reportUptime( final Instant lastConsensusEventTime = uptimeData.getLastEventTime(id); if (lastConsensusEventTime != null) { final Duration timeSinceLastConsensusEvent = Duration.between(lastConsensusEventTime, lastRoundEndTime); - uptimeMetrics - .getTimeSinceLastConsensusEventMetric(id) - .update(UNIT_MILLISECONDS.convertTo(timeSinceLastConsensusEvent.toMillis(), UNIT_SECONDS)); if (CompareTo.isLessThanOrEqualTo(timeSinceLastConsensusEvent, degradationThreshold)) { nonDegradedConsensusWeight += addressBook.getAddress(id).getWeight(); @@ -237,14 +232,6 @@ private void reportUptime( uptimeMetrics.getRoundsSinceLastConsensusEventMetric(id).update(currentRound - lastEventRound); } - final Instant lastJudgeTime = uptimeData.getLastJudgeTime(id); - if (lastJudgeTime != null) { - final Duration timeSinceLastJudge = Duration.between(lastJudgeTime, lastRoundEndTime); - uptimeMetrics - .getTimeSinceLastJudgeMetric(id) - .update(UNIT_MILLISECONDS.convertTo(timeSinceLastJudge.toMillis(), UNIT_SECONDS)); - } - final long lastJudgeRound = uptimeData.getLastJudgeRound(id); if (lastJudgeRound != NO_ROUND) { uptimeMetrics.getRoundsSinceLastJudgeMetric(id).update(currentRound - lastJudgeRound); diff --git 
a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/EventStreamSigningUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/EventStreamSigningUtils.java index 0fa1abb61352..93cfca05fd8e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/EventStreamSigningUtils.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/EventStreamSigningUtils.java @@ -24,7 +24,6 @@ import com.swirlds.common.crypto.Hash; import com.swirlds.common.crypto.SignatureType; -import com.swirlds.common.internal.SettingsCommon; import com.swirlds.common.stream.EventStreamType; import com.swirlds.common.stream.internal.InvalidStreamFileException; import edu.umd.cs.findbugs.annotations.NonNull; @@ -52,7 +51,7 @@ private EventStreamSigningUtils() {} private static final int SUPPORTED_STREAM_FILE_VERSION = 5; /** - * Sets up the constructable registry, and configures {@link SettingsCommon} + * Sets up the constructable registry * * Should be called before using stream utilities *
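Note (editorial sketch, not part of the patch): with the SwirldStateManagerImpl and SwirldStateManagerUtils hunks above, a fast copy is now made with the node's SoftwareVersion in hand, and the version is recorded on the platform data via setCreationSoftwareVersion(...) during the copy. A minimal sketch of the new call, using only names that appear in this diff; only softwareVersion is explicitly null-checked inside fastCopy:

    // Three-argument overload introduced above; softwareVersion is rejected if null.
    final State consState = SwirldStateManagerUtils.fastCopy(state, stats, softwareVersion);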
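Note (editorial sketch, not part of the patch): the ConsensusHashManager, IssHandler, and SignedStateSentinel hunks above all swap RateLimiter.request() for requestAndTrigger(). Based only on the call sites shown here, the intended pattern is roughly the sketch below; requestAndTrigger() and getDeniedRequests() both appear in the diff, while the log message is hypothetical:

    final long skipCount = selfIssRateLimiter.getDeniedRequests();
    if (selfIssRateLimiter.requestAndTrigger()) {
        // Fires at most once per rate-limit window; skipCount reports how many reports were suppressed in between.
        logger.error(EXCEPTION.getMarker(),
                "Invalid State Signature (ISS) detected; {} similar reports were rate limited", skipCount);
    }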
diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/PlatformComponents.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/PlatformComponents.java index b4af49540942..14ba8f2d2f55 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/PlatformComponents.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/PlatformComponents.java @@ -16,13 +16,13 @@ package com.swirlds.platform.util; -import static com.swirlds.common.utility.CommonUtils.throwArgNull; - import com.swirlds.base.state.Mutable; import com.swirlds.base.state.Startable; import com.swirlds.platform.dispatch.DispatchBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.LinkedList; import java.util.List; +import java.util.Objects; /** * A helper class for wiring platform components together. @@ -53,9 +53,10 @@ public PlatformComponents(final DispatchBuilder dispatchBuilder) { * the type of the component * @return the component */ - public T add(final T component) { + @NonNull + public T add(@NonNull final T component) { throwIfImmutable(); - throwArgNull(component, "component"); + Objects.requireNonNull(component); components.add(component); dispatchBuilder.registerObservers(component); return component; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/module-info.java b/platform-sdk/swirlds-platform-core/src/main/java/module-info.java index 58bbe251a0e6..dd95d5fa4630 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/module-info.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/module-info.java @@ -59,12 +59,6 @@ exports com.swirlds.platform.util; /* Targeted Exports to External Libraries */ - exports com.swirlds.platform.event to - com.swirlds.platform.test, - com.swirlds.common, - com.swirlds.common.test, - com.fasterxml.jackson.core, - com.fasterxml.jackson.databind; exports com.swirlds.platform.internal to com.swirlds.platform.test, com.fasterxml.jackson.core, @@ -126,6 +120,7 @@ exports com.swirlds.platform.reconnect; exports com.swirlds.platform.gossip.shadowgraph; exports com.swirlds.platform.recovery.emergencyfile; + exports com.swirlds.platform.event; /* Swirlds Libraries */ requires com.swirlds.base; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/AddressBookInitializerTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/AddressBookInitializerTest.java index c6e6ad6d7510..32486df950bb 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/AddressBookInitializerTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/AddressBookInitializerTest.java @@ -43,6 +43,8 @@ import com.swirlds.platform.state.signed.SignedState; import com.swirlds.test.framework.config.TestConfigBuilder; import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -51,8 +53,6 @@ import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; -import org.checkerframework.checker.nullness.qual.NonNull; -import org.checkerframework.checker.nullness.qual.Nullable; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; @@ -393,7 +393,6 @@ private Supplier 
getMockSwirldStateSupplier(int scenario) { @NonNull private AddressBook getRandomAddressBook() { return new RandomAddressBookGenerator() - .setSequentialIds(false) .setSize(5) .setCustomWeightGenerator(i -> i.id()) .build(); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CriticalQuorumTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CriticalQuorumTest.java index af2d68a9c29e..1ed96d4d75b6 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CriticalQuorumTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CriticalQuorumTest.java @@ -35,6 +35,7 @@ import com.swirlds.platform.internal.EventImpl; import com.swirlds.test.framework.TestComponentTags; import com.swirlds.test.framework.TestTypeTags; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; import java.util.ArrayList; import java.util.Collection; @@ -44,7 +45,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Stream; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Tag; import org.junit.jupiter.params.ParameterizedTest; @@ -274,7 +274,6 @@ private static Collection oneThirdNodesZeroWeight() { final AtomicInteger index = new AtomicInteger(0); final AddressBook addressBook = new RandomAddressBookGenerator() .setSize(numNodes) - .setSequentialIds(false) .setCustomWeightGenerator(id -> weights.get(index.getAndIncrement())) .build(); final String name = numNodes + " nodes, one third of nodes are zero-weight, remaining have random weight " @@ -297,7 +296,6 @@ private static Collection threeNodesWithStrongMinority() { final AtomicInteger index = new AtomicInteger(0); final AddressBook addressBook = new RandomAddressBookGenerator() .setSize(numNodes) - .setSequentialIds(false) .setCustomWeightGenerator(id -> weights.get(index.getAndIncrement())) .build(); final String name = @@ -320,7 +318,6 @@ private static Collection singleNodeWithStrongMinority() { final AtomicInteger index = new AtomicInteger(0); final AddressBook addressBook = new RandomAddressBookGenerator() .setSize(numNodes) - .setSequentialIds(false) .setCustomWeightGenerator(id -> weights.get(index.getAndIncrement())) .build(); final String name = numNodes + " nodes, one node has strong minority, remaining weight evenly distributed"; @@ -342,7 +339,6 @@ private static Collection unbalancedWeightWithOneZeroWeight() { final AtomicInteger index = new AtomicInteger(0); final AddressBook addressBook = new RandomAddressBookGenerator() .setSize(numNodes) - .setSequentialIds(false) .setCustomWeightGenerator(id -> weights.get(index.getAndIncrement())) .build(); final String name = numNodes + " node" + (numNodes == 1 ? 
"" : "s") + " unbalanced"; @@ -361,7 +357,6 @@ private static Collection balancedWeightArgs() { for (int numNodes = 1; numNodes <= 9; numNodes++) { final AddressBook addressBook = new RandomAddressBookGenerator() .setSize(numNodes) - .setSequentialIds(false) .setCustomWeightGenerator(id -> 1L) .build(); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CryptoArgsProvider.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CryptoArgsProvider.java index 284fab76d284..a58000f6e6ff 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CryptoArgsProvider.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/CryptoArgsProvider.java @@ -63,7 +63,6 @@ private static AddressBook createAddressBook() { final AddressBook addresses = new RandomAddressBookGenerator() .setSize(NUMBER_OF_ADDRESSES) .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); for (int i = 0; i < addresses.getSize(); i++) { diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java index 7e6b311885c1..e7e820d24b50 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java @@ -19,9 +19,9 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.RandomAddressBookGenerator; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.HashMap; import java.util.Random; -import org.checkerframework.checker.nullness.qual.NonNull; public class DummyHashgraph { diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/KeysAndCertsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/KeysAndCertsTest.java index 7c0b97313858..0fe3bd923a2f 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/KeysAndCertsTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/KeysAndCertsTest.java @@ -27,11 +27,11 @@ import com.swirlds.platform.crypto.CryptoStatic; import com.swirlds.platform.crypto.KeysAndCerts; import com.swirlds.platform.crypto.PlatformSigner; +import edu.umd.cs.findbugs.annotations.NonNull; import java.security.PublicKey; import java.util.Map; import java.util.Objects; import java.util.Random; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateMetadataTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateMetadataTests.java index 9202d3b725bc..19bfe5a9d4ae 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateMetadataTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateMetadataTests.java @@ -36,6 +36,7 @@ import com.swirlds.platform.state.signed.SavedStateMetadataField; import com.swirlds.platform.state.signed.SigSet; import com.swirlds.platform.state.signed.SignedState; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Files; import 
java.nio.file.Path; @@ -47,7 +48,6 @@ import java.util.Random; import java.util.Set; import java.util.stream.Collectors; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java deleted file mode 100644 index db97cc2c36b3..000000000000 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Copyright (C) 2022-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.swirlds.platform; - -import static com.swirlds.platform.SettingConstants.DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.SHOW_INTERNAL_STATS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.VERBOSE_STATISTICS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.VERIFY_EVENT_SIGS_DEFAULT_VALUE; - -import com.swirlds.common.config.ConsensusConfig; -import com.swirlds.common.config.StateConfig; -import com.swirlds.common.config.sources.LegacyFileConfigSource; -import com.swirlds.common.crypto.config.CryptoConfig; -import com.swirlds.common.io.config.TemporaryFileConfig; -import com.swirlds.config.api.Configuration; -import com.swirlds.platform.config.AddressBookConfig; -import com.swirlds.test.framework.TestTypeTags; -import com.swirlds.test.framework.config.TestConfigBuilder; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.nio.file.Path; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; - -class SettingsTest { - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that loading settings from an not existing files does not throw an exception") - public void checkNotExistingFile() { - // given - final Settings settings = Settings.getInstance(); - final File notExistingFile = - new File(new StringBuffer(SettingsTest.class.getPackageName().replace('.', '/')) - .append("/not-existing.txt") - .toString()); - - // then - Assertions.assertDoesNotThrow(() -> settings.loadSettings(notExistingFile)); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that loading settings from an empty files does not throw an exception") - public void checkEmptyFile() { - // given - final Settings settings = Settings.getInstance(); - final File emptyFile = - new File(SettingsTest.class.getResource("settings1.txt").getFile()); - - // then - Assertions.assertTrue(emptyFile.exists()); - Assertions.assertDoesNotThrow(() -> 
settings.loadSettings(emptyFile)); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that loading settings with migrated settings does not throw an exception") - public void checkOnlyConfigSettingsFile() { - // given - final Settings settings = Settings.getInstance(); - final File emptyFile = - new File(SettingsTest.class.getResource("settings13.txt").getFile()); - - // then - Assertions.assertTrue(emptyFile.exists()); - Assertions.assertDoesNotThrow(() -> settings.loadSettings(emptyFile)); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that null value for file not allowed") - public void checkNullFile() { - // given - final Settings settings = Settings.getInstance(); - final File nullFile = null; - - // when - Assertions.assertThrows(IllegalArgumentException.class, () -> settings.loadSettings(nullFile)); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that null value for path not allowed") - public void checkNullPath() { - // given - final Settings settings = Settings.getInstance(); - final Path nullPath = null; - - // when - Assertions.assertThrows(IllegalArgumentException.class, () -> settings.loadSettings(nullPath)); - } - - /** - * Currently disabled until the Settings class gets rewritten to not use a singleton design pattern. There are tests - * that are run that modify these default values before this test is run, therefore resulting in this test failing. - */ - @Test - @Disabled - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that default settings are retrieved correctly") - public void checkGetDefaultSettings() { - // given - final Settings settings = Settings.getInstance(); - final Configuration configuration = new TestConfigBuilder().getOrCreateConfig(); - - // then - Assertions.assertEquals(VERIFY_EVENT_SIGS_DEFAULT_VALUE, settings.isVerifyEventSigs()); - Assertions.assertEquals(SHOW_INTERNAL_STATS_DEFAULT_VALUE, settings.isShowInternalStats()); - Assertions.assertEquals(VERBOSE_STATISTICS_DEFAULT_VALUE, settings.isVerboseStatistics()); - Assertions.assertEquals( - Integer.parseInt(ConsensusConfig.COIN_FREQ_DEFAULT_VALUE), - configuration.getConfigData(ConsensusConfig.class).coinFreq()); - Assertions.assertEquals(DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE, settings.getDeadlockCheckPeriod()); - Assertions.assertEquals(LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE, settings.isLoadKeysFromPfxFiles()); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that loaded settings are retrieved correctly") - public void checkGetLoadedSettings() throws IOException { - // given - final Settings settings = Settings.getInstance(); - final File settingsFile = - new File(SettingsTest.class.getResource("settings4.txt").getFile()); - Assertions.assertTrue(settingsFile.exists()); - final Configuration configuration = new TestConfigBuilder() - .withSource(new LegacyFileConfigSource(settingsFile.toPath())) - .getOrCreateConfig(); - - // when - settings.loadSettings(settingsFile); - - // then - // These values should change - Assertions.assertFalse(settings.isVerifyEventSigs()); - Assertions.assertTrue(settings.isShowInternalStats()); - Assertions.assertTrue(settings.isVerboseStatistics()); - Assertions.assertEquals(2000, settings.getDeadlockCheckPeriod()); - Assertions.assertFalse(settings.isLoadKeysFromPfxFiles()); - } - - /** - * Currently disabled until the Settings class gets rewritten to not use a singleton design pattern. 
There are tests - * that are run that modify these default values before this test is run, therefore resulting in this test failing. - */ - @Test - @Disabled - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that default crypto sub-settings are retrieved correctly") - public void checkGetDefaultCryptoSubSettings() { - // given - final CryptoConfig cryptoConfig = - new TestConfigBuilder().getOrCreateConfig().getConfigData(CryptoConfig.class); - - // then - Assertions.assertEquals(0.5, cryptoConfig.cpuVerifierThreadRatio()); - Assertions.assertEquals(0.5, cryptoConfig.cpuDigestThreadRatio()); - Assertions.assertEquals(100, cryptoConfig.cpuVerifierQueueSize()); - Assertions.assertEquals(100, cryptoConfig.cpuDigestQueueSize()); - Assertions.assertTrue(cryptoConfig.forceCpu()); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that loaded crypto sub-settings are retrieved correctly") - public void checkGetLoadedCryptoSubSettings() throws IOException { - // given - final CryptoConfig cryptoConfig = new TestConfigBuilder() - .withSource(new LegacyFileConfigSource( - SettingsTest.class.getResource("settings6.txt").getFile())) - .getOrCreateConfig() - .getConfigData(CryptoConfig.class); - - // then - Assertions.assertEquals(0.75, cryptoConfig.cpuVerifierThreadRatio()); - Assertions.assertEquals(0.25, cryptoConfig.cpuDigestThreadRatio()); - Assertions.assertEquals(150, cryptoConfig.cpuVerifierQueueSize()); - Assertions.assertEquals(150, cryptoConfig.cpuDigestQueueSize()); - Assertions.assertFalse(cryptoConfig.forceCpu()); - } - - /** - * Currently disabled until the Settings class gets rewritten to not use a singleton design pattern. There are tests - * that are run that modify these default values before this test is run, therefore resulting in this test failing. - */ - @Test - @Disabled - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that default address book sub-settings are retrieved correctly") - public void checkGetDefaultAddressBookSubSettings() { - // given - final Configuration configuration = new TestConfigBuilder().getOrCreateConfig(); - - // when - final AddressBookConfig addressBookConfig = configuration.getConfigData(AddressBookConfig.class); - - // then - Assertions.assertTrue(addressBookConfig.updateAddressBookOnlyAtUpgrade()); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that loaded address book sub-settings are retrieved correctly") - public void checkGetLoadedAddressBookSubSettings() throws IOException { - // given - final Configuration configuration = new TestConfigBuilder() - .withSource(new LegacyFileConfigSource( - SettingsTest.class.getResource("settings10.txt").getFile())) - .getOrCreateConfig(); - - // when - final AddressBookConfig addressBookConfig = configuration.getConfigData(AddressBookConfig.class); - - // then - Assertions.assertFalse(addressBookConfig.updateAddressBookOnlyAtUpgrade()); - } - - /** - * Currently disabled until the Settings class gets rewritten to not use a singleton design pattern. There are tests - * that are run that modify these default values before this test is run, therefore resulting in this test failing. 
- */ - @Test - @Disabled - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that default temporary file sub-settings are retrieved correctly") - public void checkGetDefaultTemporaryFileSubSettings() { - // given - final Configuration configuration = new TestConfigBuilder().getOrCreateConfig(); - - // when - final TemporaryFileConfig temporaryFileConfig = configuration.getConfigData(TemporaryFileConfig.class); - - // then - Assertions.assertEquals( - "data/saved/swirlds-tmp", - temporaryFileConfig.getTemporaryFilePath(configuration.getConfigData(StateConfig.class))); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @DisplayName("Checks that loaded temporary file sub-settings are retrieved correctly") - public void checkGetLoadedTemporaryFileSubSettings() throws IOException { - // given - final Configuration configuration = new TestConfigBuilder() - .withSource(new LegacyFileConfigSource( - SettingsTest.class.getResource("settings12.txt").getFile())) - .getOrCreateConfig(); - - // when - final TemporaryFileConfig temporaryFileConfig = configuration.getConfigData(TemporaryFileConfig.class); - - // then - Assertions.assertEquals( - Path.of("data", "saved", "badSwirldsTmp").toString(), - temporaryFileConfig.getTemporaryFilePath(configuration.getConfigData(StateConfig.class))); - } - - private String readValueFromFile(Path settingsPath, String propertyName) throws IOException { - try (BufferedReader br = new BufferedReader(new FileReader(settingsPath.toFile()))) { - return br.lines() - .filter(line -> line.endsWith(propertyName)) - .findAny() - .map(line -> line.substring(0, line.length() - ("= " + propertyName).length())) - .map(line -> line.trim()) - .orElseThrow(() -> new IllegalStateException( - "Property '" + propertyName + "' not found in saved settings file")); - } - } -} diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestUtils.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestUtils.java index 20b51003162e..b4d8514d613c 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestUtils.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestUtils.java @@ -23,11 +23,11 @@ import com.swirlds.common.system.events.BaseEventUnhashedData; import com.swirlds.common.system.transaction.internal.SwirldTransaction; import com.swirlds.platform.internal.EventImpl; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Instant; import java.util.Arrays; import java.util.Objects; -import org.checkerframework.checker.nullness.qual.NonNull; -import org.checkerframework.checker.nullness.qual.Nullable; public class TestUtils { diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateManagementComponentTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateManagementComponentTests.java index 603158fc2fbe..f429e4419922 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateManagementComponentTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateManagementComponentTests.java @@ -49,6 +49,8 @@ import com.swirlds.platform.state.signed.SourceOfSignedState; import com.swirlds.test.framework.config.TestConfigBuilder; import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; 
+import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.file.Path; import java.time.Duration; import java.util.HashMap; @@ -58,8 +60,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.IntStream; -import org.checkerframework.checker.nullness.qual.NonNull; -import org.checkerframework.checker.nullness.qual.Nullable; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; @@ -113,7 +113,6 @@ void newStateFromTransactionsSubmitsSystemTransaction() { final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(NUM_NODES) .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); final DefaultStateManagementComponent component = newStateManagementComponent(addressBook); @@ -161,7 +160,6 @@ void signedStateToLoadIsLatestComplete() { final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(NUM_NODES) .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); final DefaultStateManagementComponent component = newStateManagementComponent(addressBook); @@ -256,7 +254,6 @@ void stateSignaturesAppliedAndTracked() { final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(NUM_NODES) .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); final DefaultStateManagementComponent component = newStateManagementComponent(addressBook); @@ -303,7 +300,6 @@ void signedStateFromTransactionsCodePath() { final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(NUM_NODES) .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); final DefaultStateManagementComponent component = newStateManagementComponent(addressBook); @@ -382,7 +378,6 @@ void testIssConsumer() { final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(NUM_NODES) .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); final DefaultStateManagementComponent component = newStateManagementComponent(addressBook); @@ -402,7 +397,6 @@ void testReconnectStateSaved() throws InterruptedException { final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(NUM_NODES) .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); final DefaultStateManagementComponent component = newStateManagementComponent( addressBook, defaultConfigBuilder().withValue("state.saveReconnectStateToDisk", true)); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateToDiskAttempt.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateToDiskAttempt.java index ea5c27136017..197f5852f481 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateToDiskAttempt.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/StateToDiskAttempt.java @@ -17,8 +17,8 @@ package com.swirlds.platform.components.state; import com.swirlds.platform.state.signed.SignedState; +import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.file.Path; -import org.checkerframework.checker.nullness.qual.NonNull; /** * A record of an attempt to write a signed state to disk. 
diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/TestStateToDiskAttemptConsumer.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/TestStateToDiskAttemptConsumer.java index 7c45c2b549c7..56c04306e41f 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/TestStateToDiskAttemptConsumer.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/components/state/TestStateToDiskAttemptConsumer.java @@ -18,11 +18,11 @@ import com.swirlds.platform.components.state.output.StateToDiskAttemptConsumer; import com.swirlds.platform.state.signed.SignedState; +import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.file.Path; import java.util.Objects; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; -import org.checkerframework.checker.nullness.qual.NonNull; /** * A {@link StateToDiskAttemptConsumer} that stores the {@link StateToDiskAttempt}s in a {@link BlockingQueue} for testing purposes diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java index 72bf51f8cfe2..88d84297dea6 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java @@ -42,6 +42,8 @@ import com.swirlds.platform.internal.ConsensusRound; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.metrics.SwirldStateMetrics; +import com.swirlds.platform.state.PlatformData; +import com.swirlds.platform.state.PlatformState; import com.swirlds.platform.state.State; import com.swirlds.platform.state.SwirldStateManager; import com.swirlds.platform.state.SwirldStateManagerImpl; @@ -189,6 +191,15 @@ private void initConsensusHandler(final SwirldState swirldState) { final State state = new State(); state.setSwirldState(swirldState); + final PlatformState platformState = mock(PlatformState.class); + when(platformState.getClassId()).thenReturn(PlatformState.CLASS_ID); + when(platformState.copy()).thenReturn(platformState); + + state.setPlatformState(platformState); + + final PlatformData platformData = mock(PlatformData.class); + when(platformState.getPlatformData()).thenReturn(platformData); + final AddressBook addressBook = new RandomAddressBookGenerator().build(); final Configuration configuration = new TestConfigBuilder() @@ -208,7 +219,8 @@ private void initConsensusHandler(final SwirldState swirldState) { mock(SwirldStateMetrics.class), transactionConfig, () -> false, - state); + state, + new BasicSoftwareVersion(1)); consensusRoundHandler = new ConsensusRoundHandler( platformContext, diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/DefaultSignedStateValidatorTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/DefaultSignedStateValidatorTests.java index ca974bfc573b..a5fc2237a64d 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/DefaultSignedStateValidatorTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/DefaultSignedStateValidatorTests.java @@ -236,7 +236,6 @@ void 
testSignedStateValidationRandom(final String desc, final List nodes, addressBook = new RandomAddressBookGenerator() .setNodeIds(nodeWeights.keySet()) .setCustomWeightGenerator(nodeWeights::get) - .setSequentialIds(false) .build(); validator = new DefaultSignedStateValidator(); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectProtocolTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectProtocolTests.java index 70d7453d39a3..2aa05a183db3 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectProtocolTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectProtocolTests.java @@ -85,10 +85,8 @@ private static Stream acceptParams() { for (final boolean teacherIsThrottled : List.of(true, false)) { for (final boolean selfIsBehind : List.of(true, false)) { for (final boolean teacherHasValidState : List.of(true, false)) { - for (final boolean stateIsInitialized : List.of(true, false)) { - arguments.add(Arguments.of(new AcceptParams( - teacherIsThrottled, selfIsBehind, teacherHasValidState, stateIsInitialized))); - } + arguments.add( + Arguments.of(new AcceptParams(teacherIsThrottled, selfIsBehind, teacherHasValidState))); } } } @@ -96,22 +94,17 @@ private static Stream acceptParams() { return arguments.stream(); } - private record AcceptParams( - boolean teacherIsThrottled, - boolean selfIsBehind, - boolean teacherHasValidState, - boolean stateIsInitialized) { + private record AcceptParams(boolean teacherIsThrottled, boolean selfIsBehind, boolean teacherHasValidState) { public boolean shouldAccept() { - return !teacherIsThrottled && !selfIsBehind && teacherHasValidState && stateIsInitialized; + return !teacherIsThrottled && !selfIsBehind && teacherHasValidState; } @Override public String toString() { return (teacherIsThrottled ? "throttled teacher" : "un-throttled teacher") + ", " + (selfIsBehind ? "teacher is behind" : "teacher not behind") - + ", " + (teacherHasValidState ? "teacher has valid state" : "teacher has no valid state") - + ", " + (stateIsInitialized ? "state is initialized" : "state is not initialized"); + + ", " + (teacherHasValidState ? 
"teacher has valid state" : "teacher has no valid state"); } } @@ -160,9 +153,6 @@ void testShouldAccept(final AcceptParams params) { if (params.teacherHasValidState) { signedState = spy(new RandomSignedStateGenerator().build()); when(signedState.isComplete()).thenReturn(true); - if (params.stateIsInitialized) { - signedState.getState().markAsInitialized(); - } } else { signedState = null; } @@ -252,7 +242,6 @@ void testTeacherThrottleReleased() { when(signedState.isComplete()).thenReturn(true); final State state = mock(State.class); when(signedState.getState()).thenReturn(state); - when(state.isInitialized()).thenReturn(true); final ReservedSignedState reservedSignedState = signedState.reserve("test"); @@ -331,7 +320,6 @@ void abortedTeacher() { final SignedState signedState = spy(new RandomSignedStateGenerator().build()); when(signedState.isComplete()).thenReturn(true); - signedState.getState().markAsInitialized(); final ReservedSignedState reservedSignedState = signedState.reserve("test"); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectTest.java index 586ec6bce5c6..76bd0be8af6d 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/ReconnectTest.java @@ -104,7 +104,6 @@ private void executeReconnect(final ReconnectMetrics reconnectMetrics) throws In .setAverageWeight(weightPerNode) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) .setHashStrategy(RandomAddressBookGenerator.HashStrategy.REAL_HASH) - .setSequentialIds(false) .build(); try (final PairedStreams pairedStreams = new PairedStreams()) { diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java index e05a93a3dc30..7ec38f07b50a 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java @@ -307,7 +307,6 @@ private AddressBook newAddressBook(final Random random, final int numNodes) { .setAverageWeight(100L) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) .setHashStrategy(RandomAddressBookGenerator.HashStrategy.REAL_HASH) - .setSequentialIds(false) .build(); } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java index 9106fba0a3d1..5ce51c480db3 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java @@ -50,7 +50,6 @@ void setup() { .setSize(NUM_NODES) .setAverageWeight(WEIGHT_PER_NODE) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); } diff 
--git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamMultiFileIteratorTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamMultiFileIteratorTest.java index 39b29b49c943..5ae1daca93c0 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamMultiFileIteratorTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamMultiFileIteratorTest.java @@ -38,6 +38,7 @@ import com.swirlds.platform.recovery.internal.EventStreamMultiFileIterator; import com.swirlds.platform.recovery.internal.EventStreamRoundLowerBound; import com.swirlds.platform.recovery.internal.EventStreamTimestampLowerBound; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -50,7 +51,6 @@ import java.util.NoSuchElementException; import java.util.Objects; import java.util.Random; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamPathIteratorTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamPathIteratorTest.java index f528dad8a593..397fb31eb0f5 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamPathIteratorTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventStreamPathIteratorTest.java @@ -33,6 +33,7 @@ import com.swirlds.platform.recovery.internal.EventStreamPathIterator; import com.swirlds.platform.recovery.internal.EventStreamRoundLowerBound; import com.swirlds.platform.recovery.internal.EventStreamTimestampLowerBound; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Path; import java.security.NoSuchAlgorithmException; @@ -45,7 +46,6 @@ import java.util.NoSuchElementException; import java.util.Objects; import java.util.Random; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/RandomSignedStateGenerator.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/RandomSignedStateGenerator.java index 77dbc3510c26..776667912f5d 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/RandomSignedStateGenerator.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/RandomSignedStateGenerator.java @@ -36,6 +36,7 @@ import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.state.signed.SignedState; import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; import java.util.ArrayList; import java.util.HashMap; @@ -44,7 +45,6 @@ import java.util.Map; import java.util.Objects; import java.util.Random; -import org.checkerframework.checker.nullness.qual.NonNull; /** * A utility for generating random signed states. 
@@ -101,7 +101,6 @@ public SignedState build() { addressBookInstance = new RandomAddressBookGenerator(random) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) .setHashStrategy(RandomAddressBookGenerator.HashStrategy.REAL_HASH) - .setSequentialIds(false) .build(); } else { addressBookInstance = addressBook; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/StateSigningTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/StateSigningTests.java index 529758f085ef..471f18bc74b3 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/StateSigningTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/StateSigningTests.java @@ -40,6 +40,7 @@ import com.swirlds.platform.state.signed.SigSet; import com.swirlds.platform.state.signed.SignedState; import com.swirlds.platform.state.signed.SignedStateInvalidException; +import edu.umd.cs.findbugs.annotations.NonNull; import java.security.PublicKey; import java.util.ArrayList; import java.util.Collections; @@ -48,7 +49,6 @@ import java.util.List; import java.util.Random; import java.util.Set; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.params.ParameterizedTest; @@ -70,7 +70,6 @@ void addValidSignaturesTest(final boolean evenWeighting) { evenWeighting ? RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED : RandomAddressBookGenerator.WeightDistributionStrategy.GAUSSIAN) - .setSequentialIds(false) .setSize(nodeCount) .build(); @@ -171,7 +170,6 @@ void addInvalidSignaturesTest(final boolean evenWeighting) { evenWeighting ? RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED : RandomAddressBookGenerator.WeightDistributionStrategy.GAUSSIAN) - .setSequentialIds(false) .setSize(nodeCount) .build(); @@ -265,7 +263,6 @@ void signatureBecomesInvalidTest(final boolean evenWeighting) { evenWeighting ? RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED : RandomAddressBookGenerator.WeightDistributionStrategy.GAUSSIAN) - .setSequentialIds(false) .setSize(nodeCount) .build(); @@ -347,7 +344,6 @@ void allSignaturesBecomeInvalidTest(final boolean evenWeighting) { evenWeighting ? RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED : RandomAddressBookGenerator.WeightDistributionStrategy.GAUSSIAN) - .setSequentialIds(false) .setSize(nodeCount) .build(); @@ -399,7 +395,6 @@ void signaturesInvalidWithDifferentAddressBookTest(final boolean evenWeighting) evenWeighting ? RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED : RandomAddressBookGenerator.WeightDistributionStrategy.GAUSSIAN) - .setSequentialIds(false) .setSize(nodeCount) .build(); @@ -458,7 +453,6 @@ void signaturesInvalidDueToZeroWeightTest(final boolean evenWeighting) { evenWeighting ? 
RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED : RandomAddressBookGenerator.WeightDistributionStrategy.GAUSSIAN) - .setSequentialIds(false) .setSize(nodeCount) .build(); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerImplTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerImplTests.java index d85a7b9d88b3..94307ef3d0ba 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerImplTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerImplTests.java @@ -22,6 +22,7 @@ import com.swirlds.common.config.TransactionConfig; import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.system.BasicSoftwareVersion; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.RandomAddressBookGenerator; @@ -61,7 +62,8 @@ void setup() { mock(SwirldStateMetrics.class), transactionConfig, () -> false, - initialState); + initialState, + new BasicSoftwareVersion(1)); } @Test @@ -115,6 +117,16 @@ void loadFromSignedStateRefCount() { private static State newState() { final State state = new State(); state.setSwirldState(new DummySwirldState()); + + final PlatformState platformState = mock(PlatformState.class); + when(platformState.getClassId()).thenReturn(PlatformState.CLASS_ID); + when(platformState.copy()).thenReturn(platformState); + + final PlatformData platformData = mock(PlatformData.class); + when(platformState.getPlatformData()).thenReturn(platformData); + + state.setPlatformState(platformState); + assertEquals(0, state.getReservationCount(), "A brand new state should have no references."); return state; } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerUtilsTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerUtilsTests.java index 6e66a20a597f..f55af93f55f4 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerUtilsTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/SwirldStateManagerUtilsTests.java @@ -19,7 +19,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import com.swirlds.common.system.BasicSoftwareVersion; import com.swirlds.common.system.SwirldState; import com.swirlds.common.test.state.DummySwirldState; import com.swirlds.platform.metrics.SwirldStateMetrics; @@ -28,24 +30,32 @@ public class SwirldStateManagerUtilsTests { - private final State state = new State(); - private final SwirldStateMetrics stats = mock(SwirldStateMetrics.class); + @BeforeEach + void setup() {} - private final PlatformState platformState = new PlatformState(); - private final SwirldState swirldState = new DummySwirldState(); + @Test + void testFastCopyIsMutable() { - @BeforeEach - void setup() { - final DualStateImpl dualState = new DualStateImpl(); + final State state = new State(); + + final PlatformState platformState = mock(PlatformState.class); + when(platformState.getClassId()).thenReturn(PlatformState.CLASS_ID); + when(platformState.copy()).thenReturn(platformState); + state.setPlatformState(platformState); + + final PlatformData 
platformData = mock(PlatformData.class); + when(platformState.getPlatformData()).thenReturn(platformData); + + final SwirldState swirldState = new DummySwirldState(); state.setSwirldState(swirldState); + + final DualStateImpl dualState = new DualStateImpl(); state.setDualState(dualState); - state.setPlatformState(platformState); - } - @Test - void testFastCopyIsMutable() { state.reserve(); - final State result = SwirldStateManagerUtils.fastCopy(state, stats); + final SwirldStateMetrics stats = mock(SwirldStateMetrics.class); + final State result = SwirldStateManagerUtils.fastCopy(state, stats, new BasicSoftwareVersion(1)); + assertFalse(result.isImmutable(), "The copy state should be mutable."); assertEquals( 1, diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AbstractSignedStateManagerTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AbstractSignedStateManagerTest.java index 4ae75abcbdc2..715e3a4c70ae 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AbstractSignedStateManagerTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AbstractSignedStateManagerTest.java @@ -30,6 +30,7 @@ import com.swirlds.platform.state.signed.SignedState; import com.swirlds.platform.state.signed.SignedStateManager; import com.swirlds.test.framework.config.TestConfigBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Duration; import java.util.Map; import java.util.Objects; @@ -39,7 +40,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Predicate; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.AfterEach; /** diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AddIncompleteStateTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AddIncompleteStateTest.java index 87f44ae705d8..e7e30cce45fe 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AddIncompleteStateTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/AddIncompleteStateTest.java @@ -53,7 +53,6 @@ class AddIncompleteStateTest extends AbstractSignedStateManagerTest { private final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(4) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); private final long firstRound = 50; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EarlySignaturesTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EarlySignaturesTest.java index 27a142ef91d1..d7ccf1c38154 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EarlySignaturesTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EarlySignaturesTest.java @@ -49,7 +49,6 @@ public class EarlySignaturesTest extends AbstractSignedStateManagerTest { private final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(4) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); /** diff --git 
a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EmergencyStateFinderTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EmergencyStateFinderTests.java index 9dcb02f7f059..f03373b545e0 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EmergencyStateFinderTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/EmergencyStateFinderTests.java @@ -37,7 +37,6 @@ public class EmergencyStateFinderTests extends AbstractSignedStateManagerTest { private final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(4) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); @DisplayName("Emergency State Finder Test") diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/OldCompleteStateEventuallyReleasedTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/OldCompleteStateEventuallyReleasedTest.java index d136e184e078..0671a68dd21d 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/OldCompleteStateEventuallyReleasedTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/OldCompleteStateEventuallyReleasedTest.java @@ -46,10 +46,8 @@ class OldCompleteStateEventuallyReleasedTest extends AbstractSignedStateManagerT // the class file with other tests. // DO NOT ADD ADDITIONAL UNIT TESTS TO THIS CLASS! - private final AddressBook addressBook = new RandomAddressBookGenerator(random) - .setSize(4) - .setSequentialIds(false) - .build(); + private final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(4).build(); /** * Called on each state as it gets too old without collecting enough signatures. diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/RegisterStatesWithoutSignaturesTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/RegisterStatesWithoutSignaturesTest.java index 060d25ab1cff..941a6213d622 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/RegisterStatesWithoutSignaturesTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/RegisterStatesWithoutSignaturesTest.java @@ -42,10 +42,8 @@ public class RegisterStatesWithoutSignaturesTest extends AbstractSignedStateMana // the class file with other tests. // DO NOT ADD ADDITIONAL UNIT TESTS TO THIS CLASS! - private final AddressBook addressBook = new RandomAddressBookGenerator(random) - .setSize(4) - .setSequentialIds(false) - .build(); + private final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(4).build(); /** * Called on each state as it gets too old without collecting enough signatures. 
diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesRestartTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesRestartTest.java index efc0f4aa801a..107c430bd774 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesRestartTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesRestartTest.java @@ -52,7 +52,6 @@ public class SequentialSignaturesRestartTest extends AbstractSignedStateManagerT private final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(4) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); private final long firstRound = 50; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesTest.java index 771150b9d0a2..7b11b8d2cf57 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/manager/SequentialSignaturesTest.java @@ -50,7 +50,6 @@ public class SequentialSignaturesTest extends AbstractSignedStateManagerTest { private final AddressBook addressBook = new RandomAddressBookGenerator(random) .setSize(4) .setWeightDistributionStrategy(RandomAddressBookGenerator.WeightDistributionStrategy.BALANCED) - .setSequentialIds(false) .build(); /** diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/uptime/UptimeTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/uptime/UptimeTests.java index 8c3b9f393337..1f5b7478c380 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/uptime/UptimeTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/uptime/UptimeTests.java @@ -38,6 +38,7 @@ import com.swirlds.platform.internal.ConsensusRound; import com.swirlds.platform.internal.EventImpl; import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -49,7 +50,6 @@ import java.util.Objects; import java.util.Random; import java.util.Set; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; @@ -107,10 +107,8 @@ void roundScanTest() { TestPlatformContextBuilder.create().build(); final FakeTime time = new FakeTime(); - final AddressBook addressBook = new RandomAddressBookGenerator(random) - .setSize(10) - .setSequentialIds(false) - .build(); + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(10).build(); final NodeId selfId = addressBook.getNodeId(0); final UptimeTracker uptimeTracker = new UptimeTracker(platformContext, addressBook, selfId, time); @@ -242,10 +240,8 @@ void roundScanChangingAddressBookTest() { TestPlatformContextBuilder.create().build(); final FakeTime time = new FakeTime(); - final AddressBook addressBook = new RandomAddressBookGenerator(random) - .setSize(10) - .setSequentialIds(false) - .build(); + final 
AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(10).build(); final NodeId selfId = addressBook.getNodeId(0); final UptimeTracker uptimeTracker = new UptimeTracker(platformContext, addressBook, selfId, time); @@ -589,10 +585,8 @@ void degradedTest() { TestPlatformContextBuilder.create().build(); final FakeTime time = new FakeTime(); - final AddressBook addressBook = new RandomAddressBookGenerator(random) - .setSize(3) - .setSequentialIds(false) - .build(); + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(3).build(); final NodeId selfId = addressBook.getNodeId(0); final UptimeTracker uptimeTracker = new UptimeTracker(platformContext, addressBook, selfId, time); diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/build.gradle.kts b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/build.gradle.kts index 111acc2c51e9..007c00270e01 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/build.gradle.kts +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/build.gradle.kts @@ -15,36 +15,37 @@ */ plugins { - id("com.swirlds.platform.conventions") - id("com.swirlds.platform.library") - id("com.swirlds.platform.maven-publish") - id("org.gradle.java-test-fixtures") + id("com.swirlds.platform.conventions") + id("com.swirlds.platform.library") + id("com.swirlds.platform.maven-publish") + id("org.gradle.java-test-fixtures") } extraJavaModuleInfo { failOnMissingModuleInfo.set(false) } dependencies { - // Individual Dependencies - implementation(project(":swirlds-base")) - implementation(project(":swirlds-common")) - implementation(project(":swirlds-base")) - api(testLibs.junit.jupiter.api) - compileOnly(libs.spotbugs.annotations) + // Individual Dependencies + implementation(project(":swirlds-base")) + implementation(project(":swirlds-common")) + implementation(project(":swirlds-base")) + api(testLibs.junit.jupiter.api) + compileOnly(libs.spotbugs.annotations) - // Test Dependencies + // Test Dependencies + testCompileOnly(libs.spotbugs.annotations) - // These should not be implementation() based deps, but this requires refactoring to eliminate. - implementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) + // These should not be implementation() based deps, but this requires refactoring to eliminate. 
+ implementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) - testImplementation(libs.bundles.logging.impl) - testImplementation(testLibs.bundles.junit) - testImplementation(testLibs.bundles.mocking) - testImplementation(testLibs.bundles.utils) - testImplementation(project(":swirlds-config-impl")) - testImplementation(testFixtures(project(":swirlds-common"))) + testImplementation(libs.bundles.logging.impl) + testImplementation(testLibs.bundles.junit) + testImplementation(testLibs.bundles.mocking) + testImplementation(testLibs.bundles.utils) + testImplementation(project(":swirlds-config-impl")) + testImplementation(testFixtures(project(":swirlds-common"))) - testImplementation(libs.prometheus.httpserver) { - exclude("io.prometheus", "simpleclient_tracer_otel") - exclude("io.prometheus", "simpleclient_tracer_otel_agent") - } + testImplementation(libs.prometheus.httpserver) { + exclude("io.prometheus", "simpleclient_tracer_otel") + exclude("io.prometheus", "simpleclient_tracer_otel_agent") + } } diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/main/java/com/swirlds/common/test/RandomAddressBookGenerator.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/main/java/com/swirlds/common/test/RandomAddressBookGenerator.java index 73606b494353..cf2311757066 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/main/java/com/swirlds/common/test/RandomAddressBookGenerator.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/main/java/com/swirlds/common/test/RandomAddressBookGenerator.java @@ -69,12 +69,6 @@ public enum HashStrategy { */ private HashStrategy hashStrategy = HashStrategy.NO_HASH; - /** - * If true then IDs will be generated 0, 1, 2, 3, etc. with no gaps. If false then there may be some gaps - * and the first node ID may not be 0. - */ - private boolean sequentialIds = false; - /** * Describes different ways that the random address book has its weight distributed if the custom strategy * lambda is unset. @@ -223,13 +217,9 @@ public static Address addressWithRandomData( */ private NodeId getNextNodeId() { final NodeId nextId; - if (sequentialIds) { - nextId = previousNodeId == null ? NodeId.FIRST_NODE_ID : new NodeId(previousNodeId.id() + 1); - } else { - // randomly advance between 1 and 3 steps - final int offset = random.nextInt(3); - nextId = previousNodeId == null ? new NodeId(offset) : new NodeId(previousNodeId.id() + offset + 1L); - } + // randomly advance between 1 and 3 steps + final int offset = random.nextInt(3); + nextId = previousNodeId == null ? new NodeId(offset) : new NodeId(previousNodeId.id() + offset + 1L); previousNodeId = nextId; return nextId; } @@ -382,27 +372,6 @@ public RandomAddressBookGenerator setHashStrategy(final HashStrategy hashStrateg return this; } - /** - *
- * Specify if sequential IDs should be used. If true then IDs will start at 0 and will not have any gaps. - * If false then there may be some gaps between IDs. - * - * FUTURE WORK: eventually, most if not all tests should be resilient with respect to non-sequential node IDs. - * When it is necessary to support non-sequential node IDs in production then we may want to remove this setter - * and always generate non-sequentially.
- * - * @return this object - * @deprecated for removal in 0.40.0; there is no alternative, addresses must be able to be non-sequential - */ - @Deprecated(forRemoval = true, since = "0.40.0") - public RandomAddressBookGenerator setSequentialIds(final boolean sequentialIds) { - this.sequentialIds = sequentialIds; - return this; - } - /** * Set the average weight for an address. * diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/sequence/SequenceMapTests.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/sequence/SequenceMapTests.java index 13f5d80b0436..7a00982684a3 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/sequence/SequenceMapTests.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/sequence/SequenceMapTests.java @@ -44,7 +44,6 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Stream; import org.junit.jupiter.api.DisplayName; @@ -77,8 +76,13 @@ public String toString() { } } - private record MapBuilder( - String name, BiFunction> constructor) { + @FunctionalInterface + private interface MapConstructor { + SequenceMap newMap( + final long initialFirstSequenceNumber, final int sequenceNumberCapacity, final boolean allowExpansion); + } + + private record MapBuilder(String name, MapConstructor constructor) { @Override public String toString() { return name; @@ -89,10 +93,12 @@ static Stream testConfiguration() { return Stream.of( Arguments.of(new MapBuilder( "standard", - (min, capacity) -> new StandardSequenceMap<>(min, capacity, SequenceMapKey::sequence))), + (min, capacity, allowExpansion) -> + new StandardSequenceMap<>(min, capacity, allowExpansion, SequenceMapKey::sequence))), Arguments.of(new MapBuilder( "concurrent", - (min, capacity) -> new ConcurrentSequenceMap<>(min, capacity, SequenceMapKey::sequence)))); + (min, capacity, allowExpansion) -> + new ConcurrentSequenceMap<>(min, capacity, allowExpansion, SequenceMapKey::sequence)))); } private static boolean isKeyPresent(final SequenceMap map, final Long sequenceNumber) { @@ -104,17 +110,13 @@ private static boolean isKeyPresent(final SequenceMap m /** * Do validation on a map. 
* - * @param map - * the map being validated - * @param smallestKeyToCheck - * the smallest key to check - * @param keyToCheckUpperBound - * the upper bound (exclusive) of keys to check - * @param getSequenceNumber - * provides the expected sequence number for a key, or null if the key is not expected to be in the map - * @param getValue - * provides the expected value for a key (result is ignored if sequence number is reported as null or the - * sequence number falls outside the map's bounds) + * @param map the map being validated + * @param smallestKeyToCheck the smallest key to check + * @param keyToCheckUpperBound the upper bound (exclusive) of keys to check + * @param getSequenceNumber provides the expected sequence number for a key, or null if the key is not expected + * to be in the map + * @param getValue provides the expected value for a key (result is ignored if sequence number is + * reported as null or the sequence number falls outside the map's bounds) */ private void validateMapContents( final SequenceMap map, @@ -150,7 +152,7 @@ private void validateMapContents( // Note: the sequence number in the key is unused when we are just querying. So it's // ok to lie and provide a sequence number of 0 here, even though 0 may not be the // correct sequence number for the given key. - assertNull(map.get(new SequenceMapKey(key, 0)), "unexpected value"); + assertNull(map.get(new SequenceMapKey(key, 0)), "unexpected value for key " + key); assertFalse(map.containsKey(new SequenceMapKey(key, 0)), "should not contain key"); } } @@ -205,7 +207,7 @@ void simpleAccessTest(final MapBuilder mapBuilder) { // The number of things inserted into the map final int size = 100; - final SequenceMap map = mapBuilder.constructor.apply(0L, size); + final SequenceMap map = mapBuilder.constructor.newMap(0, size, false); // The number of keys for each sequence number final int keysPerSeq = 5; @@ -240,8 +242,7 @@ void positiveStartAccessTest(final MapBuilder mapBuilder) { // The number of keys for each sequence number final int keysPerSeq = 5; - final SequenceMap map = - mapBuilder.constructor.apply((long) (start / keysPerSeq), size); + final SequenceMap map = mapBuilder.constructor.newMap(start / keysPerSeq, size, false); for (int offset = 0; offset < size; offset++) { @@ -266,8 +267,6 @@ void positiveStartAccessTest(final MapBuilder mapBuilder) { key -> -key); } - // FUTURE WORK negative sequence number tests - @ParameterizedTest @MethodSource("testConfiguration") @DisplayName("Negative Start Access Test") @@ -279,8 +278,7 @@ void negativeStartAccessTest(final MapBuilder mapBuilder) { // The number of keys for each sequence number final int keysPerSeq = 5; - final SequenceMap map = - mapBuilder.constructor.apply((long) (start / keysPerSeq), size); + final SequenceMap map = mapBuilder.constructor.newMap(start / keysPerSeq, size, false); for (int offset = 0; offset < size; offset++) { @@ -305,8 +303,6 @@ void negativeStartAccessTest(final MapBuilder mapBuilder) { key -> -key); } - // FUTURE WORK start at high positive number - @ParameterizedTest @MethodSource("testConfiguration") @DisplayName("Single Shift Test") @@ -317,7 +313,7 @@ void singleShiftTest(final MapBuilder mapBuilder) { final int keysPerSeq = 5; final int capacity = size / keysPerSeq; - final SequenceMap map = mapBuilder.constructor.apply(0L, capacity); + final SequenceMap map = mapBuilder.constructor.newMap(0, capacity, false); assertEquals(0, map.getFirstSequenceNumberInWindow(), "unexpected lower bound"); for (int i = 0; i < size; i++) { @@ 
-358,7 +354,7 @@ void bigShiftTest(final MapBuilder mapBuilder) { final int keysPerSeq = 5; final int capacity = size / keysPerSeq; - final SequenceMap map = mapBuilder.constructor.apply(0L, capacity); + final SequenceMap map = mapBuilder.constructor.newMap(0, capacity, false); assertEquals(0, map.getFirstSequenceNumberInWindow(), "unexpected lower bound"); for (int i = 0; i < size; i++) { @@ -397,7 +393,7 @@ void purgeWithCallbackTest(final MapBuilder mapBuilder) { // The number of things inserted into the map final int size = 100; - final SequenceMap map = mapBuilder.constructor.apply(0L, size); + final SequenceMap map = mapBuilder.constructor.newMap(0, size, false); assertEquals(0, map.getFirstSequenceNumberInWindow(), "unexpected lower bound"); // The number of keys for each sequence number @@ -446,7 +442,7 @@ void upperLowerBoundTest(final MapBuilder mapBuilder) { // The number of keys for each sequence number final int keysPerSeq = 5; - final SequenceMap map = mapBuilder.constructor.apply(5L, 5); + final SequenceMap map = mapBuilder.constructor.newMap(5, 5, false); assertEquals(5, map.getFirstSequenceNumberInWindow(), "unexpected lower bound"); assertEquals(9, map.getLastSequenceNumberInWindow(), "unexpected upper bound"); @@ -474,7 +470,7 @@ void shiftingWindowTest(final MapBuilder mapBuilder) { int initialLowerBound = -10; int lowerBound = initialLowerBound; - final SequenceMap map = mapBuilder.constructor.apply((long) lowerBound, capacity); + final SequenceMap map = mapBuilder.constructor.newMap(lowerBound, capacity, false); for (int iteration = 0; iteration < 10; iteration++) { // shift the lower bound @@ -515,7 +511,7 @@ void shiftingWindowTest(final MapBuilder mapBuilder) { @MethodSource("testConfiguration") @DisplayName("One Directional Shift Test") void oneDirectionalShiftTest(final MapBuilder mapBuilder) { - final SequenceMap map = mapBuilder.constructor.apply(-10L, 10); + final SequenceMap map = mapBuilder.constructor.newMap(-10, 10, false); // Shifting in the positive direction should not cause problems map.shiftWindow(-5); @@ -547,7 +543,7 @@ void clearTest(final MapBuilder mapBuilder) { final int lowerBound = 50; final int capacity = 5; - final SequenceMap map = mapBuilder.constructor.apply((long) lowerBound, capacity); + final SequenceMap map = mapBuilder.constructor.newMap(lowerBound, capacity, false); assertEquals(lowerBound, map.getFirstSequenceNumberInWindow(), "unexpected lower bound"); @@ -617,7 +613,7 @@ void removeTest(final MapBuilder mapBuilder) { final int lowerBound = 0; final int capacity = 20; - final SequenceMap map = mapBuilder.constructor.apply((long) lowerBound, capacity); + final SequenceMap map = mapBuilder.constructor.newMap(lowerBound, capacity, false); // removing values from an empty map shouldn't cause problems assertNull(map.remove(new SequenceMapKey(-100, 0)), "value should not be in map"); @@ -675,7 +671,7 @@ void replacingPutTest(final MapBuilder mapBuilder) { final int lowerBound = 0; final int capacity = 20; - final SequenceMap map = mapBuilder.constructor.apply((long) lowerBound, capacity); + final SequenceMap map = mapBuilder.constructor.newMap(lowerBound, capacity, false); assertNull(map.put(new SequenceMapKey(10, 2), -10), "no value should currently be in map"); @@ -724,7 +720,7 @@ void computeIfAbsentTest(final MapBuilder mapBuilder) { // The highest permitted sequence number final int upperBound = 100; - final SequenceMap map = mapBuilder.constructor.apply((long) lowerBound, capacity); + final SequenceMap map = 
mapBuilder.constructor.newMap(lowerBound, capacity, false); // Value that is in a legal range and not present assertEquals(-10, map.computeIfAbsent(new SequenceMapKey(10, 2), key -> -key.key), "incorrect value returned"); @@ -802,7 +798,7 @@ void putIfAbsentTest(final MapBuilder mapBuilder) { // The highest permitted sequence number final int upperBound = 100; - final SequenceMap map = mapBuilder.constructor.apply((long) lowerBound, capacity); + final SequenceMap map = mapBuilder.constructor.newMap(lowerBound, capacity, false); // Value that is in a legal range and not present assertTrue(map.putIfAbsent(new SequenceMapKey(10, 2), -10), "value is not yet in the map"); @@ -864,7 +860,7 @@ void putIfAbsentTest(final MapBuilder mapBuilder) { @MethodSource("testConfiguration") @DisplayName("removeSequenceNumber() Test") void removeSequenceNumberTest(final MapBuilder mapBuilder) { - final SequenceMap map = mapBuilder.constructor.apply(0L, 100); + final SequenceMap map = mapBuilder.constructor.newMap(0, 100, false); // The number of things inserted into the map final int size = 100; @@ -982,7 +978,7 @@ void removeSequenceNumberTest(final MapBuilder mapBuilder) { @MethodSource("testConfiguration") @DisplayName("removeSequenceNumber() With Callback Test") void removeSequenceNumberWithCallbackTest(final MapBuilder mapBuilder) { - final SequenceMap map = mapBuilder.constructor.apply(0L, 100); + final SequenceMap map = mapBuilder.constructor.newMap(0, 100, false); // The number of things inserted into the map final int size = 100; @@ -1191,4 +1187,229 @@ void parallelSequenceMapTest() throws InterruptedException { assertFalse(error.get(), "error(s) encountered"); } + + @ParameterizedTest + @MethodSource("testConfiguration") + @DisplayName("Expand Start From Sequence 0 Test") + void expandStartFromSequence0Test(final MapBuilder mapBuilder) { + final Random random = getRandomPrintSeed(); + + // The number of things inserted into the map + final int size = 10_000; + + final SequenceMap map = mapBuilder.constructor.newMap(0, 1, true); + + // The number of keys for each sequence number + final int keysPerSeq = 5; + + for (int i = 0; i < size; i++) { + if (random.nextBoolean()) { + map.put(new SequenceMapKey(i, i / keysPerSeq), -i); + } else { + map.putIfAbsent(new SequenceMapKey(i, i / keysPerSeq), -i); + } + assertEquals(i + 1, map.getSize(), "unexpected size"); + } + + validateMapContents( + map, + -size, + 2 * size, + key -> { + if (key >= 0 && key < size) { + return (long) key / keysPerSeq; + } else { + // key is not present + return null; + } + }, + key -> -key); + } + + @ParameterizedTest + @MethodSource("testConfiguration") + @DisplayName("Expand Start From Negative Sequence Test") + void expandStartFromNegativeSequenceTest(final MapBuilder mapBuilder) { + final Random random = getRandomPrintSeed(); + + // The number of things inserted into the map + final int size = 10_000; + + final int initialSequenceNumber = -42; + + final SequenceMap map = mapBuilder.constructor.newMap(initialSequenceNumber, 1, true); + + // The number of keys for each sequence number + final int keysPerSeq = 5; + + for (int i = 0; i < size; i++) { + if (random.nextBoolean()) { + map.put(new SequenceMapKey(i, i / keysPerSeq + initialSequenceNumber), -i); + } else { + map.putIfAbsent(new SequenceMapKey(i, i / keysPerSeq + initialSequenceNumber), -i); + } + assertEquals(i + 1, map.getSize(), "unexpected size"); + } + + validateMapContents( + map, + -size, + 2 * size, + key -> { + if (key >= 0 && key < size) { + return (long) key 
/ keysPerSeq + initialSequenceNumber; + } else { + // key is not present + return null; + } + }, + key -> -key); + } + + @ParameterizedTest + @MethodSource("testConfiguration") + @DisplayName("Expand Start From Positive Sequence Test") + void expandStartFromPositiveSequenceTest(final MapBuilder mapBuilder) { + final Random random = getRandomPrintSeed(); + + // The number of things inserted into the map + final int size = 10_000; + + final int initialSequenceNumber = 42; + + final SequenceMap map = mapBuilder.constructor.newMap(initialSequenceNumber, 1, true); + + // The number of keys for each sequence number + final int keysPerSeq = 5; + + for (int i = 0; i < size; i++) { + if (random.nextBoolean()) { + map.put(new SequenceMapKey(i, i / keysPerSeq + initialSequenceNumber), -i); + } else { + map.putIfAbsent(new SequenceMapKey(i, i / keysPerSeq + initialSequenceNumber), -i); + } + assertEquals(i + 1, map.getSize(), "unexpected size"); + } + + validateMapContents( + map, + -size, + 2 * size, + key -> { + if (key >= 0 && key < size) { + return (long) key / keysPerSeq + initialSequenceNumber; + } else { + // key is not present + return null; + } + }, + key -> -key); + } + + @ParameterizedTest + @MethodSource("testConfiguration") + @DisplayName("Expand And Shift Test") + void expandAndShiftTest(final MapBuilder mapBuilder) { + final Random random = getRandomPrintSeed(); + + final int phaseOneCount = 1_000; + final int phaseTwoCount = 10_000; + + final SequenceMap map = mapBuilder.constructor.newMap(0, 1, true); + + // The number of keys for each sequence number + final int keysPerSeq = 5; + + for (int i = 0; i < phaseOneCount; i++) { + if (random.nextBoolean()) { + map.put(new SequenceMapKey(i, i / keysPerSeq), -i); + } else { + map.putIfAbsent(new SequenceMapKey(i, i / keysPerSeq), -i); + } + assertEquals(i + 1, map.getSize(), "unexpected size"); + } + + // This shift will cause us to remove half of the elements added during phase 1 + final long firstSeqAfterShift = phaseOneCount / keysPerSeq / 2; + map.shiftWindow(firstSeqAfterShift); + final int countAfterPhase1 = map.getSize(); + + for (int i = 0; i < phaseTwoCount; i++) { + final int key = i + phaseOneCount; + if (random.nextBoolean()) { + map.put(new SequenceMapKey(key, key / keysPerSeq), -key); + } else { + map.putIfAbsent(new SequenceMapKey(key, key / keysPerSeq), -key); + } + assertEquals(countAfterPhase1 + i + 1, map.getSize(), "unexpected size"); + } + + validateMapContents( + map, + -phaseOneCount, + 2 * phaseTwoCount, + key -> { + if (key < (phaseTwoCount + phaseOneCount) && (key / keysPerSeq) >= firstSeqAfterShift) { + return (long) key / keysPerSeq; + } else { + // key is not present + return null; + } + }, + key -> -key); + } + + @ParameterizedTest + @MethodSource("testConfiguration") + @DisplayName("Sudden Expansion Test") + void suddenExpansionTest(final MapBuilder mapBuilder) { + final Random random = getRandomPrintSeed(); + + // The number of things inserted into the map + final int size = 10_000; + + final SequenceMap map = mapBuilder.constructor.newMap(0, 1, true); + + // The number of keys for each sequence number + final int keysPerSeq = 5; + + // Intentionally start inserting high sequence numbers first + for (int i = size - 1; i >= 0; i--) { + if (random.nextBoolean()) { + map.put(new SequenceMapKey(i, i / keysPerSeq), -i); + } else { + map.putIfAbsent(new SequenceMapKey(i, i / keysPerSeq), -i); + } + assertEquals(size - i, map.getSize(), "unexpected size"); + } + + validateMapContents( + map, + -size, + 2 * size, + 
key -> { + if (key >= 0 && key < size) { + return (long) key / keysPerSeq; + } else { + // key is not present + return null; + } + }, + key -> -key); + } + + @ParameterizedTest + @MethodSource("testConfiguration") + @DisplayName("Expansion Limits Test") + void expansionLimitsTest(final MapBuilder mapBuilder) { + final SequenceMap map1 = mapBuilder.constructor.newMap(0, 1, true); + assertThrows(IllegalStateException.class, () -> map1.put(new SequenceMapKey(1, Integer.MAX_VALUE - 7), 1)); + assertThrows(IllegalStateException.class, () -> map1.put(new SequenceMapKey(1, Integer.MAX_VALUE), 1)); + assertThrows(IllegalStateException.class, () -> map1.put(new SequenceMapKey(1, Long.MAX_VALUE), 1)); + + final SequenceMap map2 = mapBuilder.constructor.newMap(Long.MIN_VALUE, 1, true); + assertThrows(IllegalStateException.class, () -> map2.put(new SequenceMapKey(1, Integer.MAX_VALUE - 7), 1)); + assertThrows(IllegalStateException.class, () -> map2.put(new SequenceMapKey(1, Integer.MAX_VALUE), 1)); + assertThrows(IllegalStateException.class, () -> map2.put(new SequenceMapKey(1, Long.MAX_VALUE), 1)); + } } diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java index 0e855a62f76c..21d3b4deeb97 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java @@ -21,6 +21,7 @@ import static com.swirlds.common.test.AssertionUtils.assertEventuallyFalse; import static com.swirlds.common.test.AssertionUtils.assertEventuallyTrue; import static com.swirlds.common.test.AssertionUtils.completeBeforeTimeout; +import static com.swirlds.common.threading.framework.internal.AbstractQueueThreadConfiguration.UNLIMITED_CAPACITY; import static com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager; import static com.swirlds.test.framework.TestQualifierTags.TIME_CONSUMING; import static java.util.concurrent.TimeUnit.MILLISECONDS; @@ -1051,23 +1052,23 @@ void idleCallbackTest() throws InterruptedException { }); queue.start(); - // The queue should call the idle callback during this time, + // The queue should not call the idle callback during this time, // but give it some time to do bad things if it's going to do bad things. MILLISECONDS.sleep(10); latch1.countDown(); - // The queue should call the idle callback during this time, + // The queue should not call the idle callback during this time, // but give it some time to do bad things if it's going to do bad things. MILLISECONDS.sleep(10); latch2.countDown(); - // The queue should call the idle callback during this time, + // The queue should not call the idle callback during this time, // but give it some time to do bad things if it's going to do bad things. MILLISECONDS.sleep(10); - // Once job 3 is permitted to complete, we expect for the idle callback to be invoked shortly afterwards. + // Once job 3 is permitted to complete, we expect for the idle callback to be invoked shortly afterward. 
idleCallbackPermitted.set(true); latch3.countDown(); @@ -1078,4 +1079,68 @@ void idleCallbackTest() throws InterruptedException { assertFalse(error.get()); } + + @Test + void batchCompletedCallbackTest() throws InterruptedException { + final AtomicInteger count = new AtomicInteger(0); + + final int bufferSize = 100; + + final QueueThread queue = new QueueThreadConfiguration(getStaticThreadManager()) + .setThreadName("test") + .setBatchHandledCallback(count::getAndIncrement) + .setHandler(x -> {}) + .setCapacity(UNLIMITED_CAPACITY) + .setMaxBufferSize(bufferSize) + .build(); + + // Add a bunch of stuff to the queue. Things haven't started yet, so we shouldn't have any callbacks. + for (int i = 0; i < bufferSize; i++) { + queue.add(i); + } + + assertEquals(0, count.get()); + + // Start the queue. We should see the batch complete callback exactly once, since all 100 items will fit + // into the buffer. + + queue.start(); + + assertEventuallyEquals(1, count::get, Duration.ofSeconds(1), "Batch completed callback was not called"); + + // Wait for a while. Callback should not be called, but give the thread time to misbehave if it wants to. + MILLISECONDS.sleep(10); + assertEquals(1, count.get()); + + // Adding just a single element should cause the callback to be called again. + queue.add(42); + + assertEventuallyEquals(2, count::get, Duration.ofSeconds(1), "Batch completed callback was not called"); + + // Wait for a while. Callback should not be called, but give the thread time to misbehave if it wants to. + MILLISECONDS.sleep(10); + assertEquals(2, count.get()); + + // Add a bunch of stuff. Any number of callbacks between the number of elements divided by the buffer size + // and the number of elements is legal. + final int amountToAdd = 10_000; + for (int i = 0; i < amountToAdd; i++) { + queue.add(i); + } + + final int minCount = 2 + (amountToAdd / bufferSize); + final int maxCount = 2 + amountToAdd; + + assertEventuallyTrue( + () -> count.get() >= minCount, + Duration.ofSeconds(1), + "Batch completed callback was not called enough times"); + + // Give the thread some time to misbehave if it wants to.
+ MILLISECONDS.sleep(10); + + assertTrue(count.get() <= maxCount, "Batch completed callback was called too many times"); + + queue.stop(); + } } diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/throttle/RateLimiterTests.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/throttle/RateLimiterTests.java index 2f689ce8f959..a111ecf4b43b 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/throttle/RateLimiterTests.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/throttle/RateLimiterTests.java @@ -16,13 +16,15 @@ package com.swirlds.common.test.throttle; +import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed; import static com.swirlds.common.utility.CompareTo.isLessThan; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import com.swirlds.common.test.fixtures.FakeTime; import com.swirlds.common.utility.throttle.RateLimiter; import java.time.Duration; +import java.util.Random; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; @@ -34,6 +36,8 @@ class RateLimiterTests { @ValueSource(ints = {1, 2, 100}) @DisplayName("Period Test") void periodTest(final int periodMs) { + final Random random = getRandomPrintSeed(); + final FakeTime time = new FakeTime(Duration.ofNanos(1)); final RateLimiter rateLimiter = new RateLimiter(time, Duration.ofMillis(periodMs)); @@ -43,23 +47,48 @@ void periodTest(final int periodMs) { final Duration limit = Duration.ofSeconds(1); while (isLessThan(time.elapsed(), limit)) { assertEquals(denied, rateLimiter.getDeniedRequests(), "invalid number of denied requests"); - if (rateLimiter.request()) { + + // Check and see if the rate limiter will allow the action to be triggered. + final boolean requestAccepted = rateLimiter.request(); + if (!requestAccepted) { + denied++; + } else { + denied = 0; + } + + assertEquals(denied, rateLimiter.getDeniedRequests(), "invalid number of denied requests"); + + if (random.nextBoolean()) { + if (rateLimiter.request()) { + rateLimiter.trigger(); + count++; + } else { + denied++; + } + } else if (rateLimiter.requestAndTrigger()) { count++; denied = 0; } else { denied++; } + + // If we successfully triggered above, we should now be denied until time advances. If we did not + // successfully trigger, we should also be denied. 
+ assertFalse(rateLimiter.request()); + denied++; + time.tick(Duration.ofNanos(1_000)); } - assertTrue(count <= 1000 / periodMs, "count is too high"); - assertTrue(count > 1000 / periodMs / 2, "count is way to low"); + assertEquals(limit.toMillis() / periodMs, count); } @ParameterizedTest @ValueSource(ints = {1, 2, 100}) @DisplayName("Frequency Test") void frequencyTest(final int periodMs) { + final Random random = getRandomPrintSeed(); + final FakeTime time = new FakeTime(Duration.ofNanos(1)); final RateLimiter rateLimiter = new RateLimiter(time, 1000.0 / periodMs); @@ -69,16 +98,39 @@ void frequencyTest(final int periodMs) { final Duration limit = Duration.ofSeconds(1); while (isLessThan(time.elapsed(), limit)) { assertEquals(denied, rateLimiter.getDeniedRequests(), "invalid number of denied requests"); - if (rateLimiter.request()) { + + // Check and see if the rate limiter will allow the action to be triggered. + final boolean requestAccepted = rateLimiter.request(); + if (!requestAccepted) { + denied++; + } else { + denied = 0; + } + + assertEquals(denied, rateLimiter.getDeniedRequests(), "invalid number of denied requests"); + + if (random.nextBoolean()) { + if (rateLimiter.request()) { + rateLimiter.trigger(); + count++; + } else { + denied++; + } + } else if (rateLimiter.requestAndTrigger()) { count++; denied = 0; } else { denied++; } + + // If we successfully triggered above, we should now be denied until time advances. If we did not + // successfully trigger, we should also be denied. + assertFalse(rateLimiter.request()); + denied++; + time.tick(Duration.ofNanos(1_000)); } - assertTrue(count <= 1000 / periodMs, "count is too high"); - assertTrue(count > 1000 / periodMs / 2, "count is way to low"); + assertEquals(limit.toMillis() / periodMs, count); } } diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java index 743ada4153eb..b917433a308f 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java @@ -24,11 +24,11 @@ import com.swirlds.common.system.NodeId; import com.swirlds.config.api.Configuration; import com.swirlds.test.framework.config.TestConfigBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.BufferedWriter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/build.gradle.kts b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/build.gradle.kts index 1d1f753820d8..40f00812ae30 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/build.gradle.kts +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/build.gradle.kts @@ -15,33 +15,34 @@ */ plugins { - id("com.swirlds.platform.conventions") - id("com.swirlds.platform.library") - id("com.swirlds.platform.benchmark-conventions") - id("org.gradle.java-test-fixtures") + id("com.swirlds.platform.conventions") + id("com.swirlds.platform.library") + 
id("com.swirlds.platform.benchmark-conventions") + id("org.gradle.java-test-fixtures") } extraJavaModuleInfo { failOnMissingModuleInfo.set(false) } dependencies { - // Individual Dependencies - implementation(project(":swirlds-platform-core")) - compileOnly(libs.spotbugs.annotations) + // Individual Dependencies + implementation(project(":swirlds-platform-core")) + compileOnly(libs.spotbugs.annotations) - // Test Dependencies + // Test Dependencies + testCompileOnly(libs.spotbugs.annotations) - // These should not be implementation() based deps, but this requires refactoring to eliminate. - implementation(testLibs.bundles.mocking) - implementation(project(":swirlds-unit-tests:common:swirlds-common-test")) - implementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) + // These should not be implementation() based deps, but this requires refactoring to eliminate. + implementation(testLibs.bundles.mocking) + implementation(project(":swirlds-unit-tests:common:swirlds-common-test")) + implementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) - testImplementation(project(":swirlds-merkle")) - testImplementation(project(":swirlds-sign-tool")) // TODO: should be removed in future - testImplementation(libs.commons.collections4) - testImplementation(libs.classgraph) - testImplementation(testLibs.bundles.junit) - testImplementation(testLibs.bundles.utils) - testImplementation(testFixtures(project(":swirlds-common"))) + testImplementation(project(":swirlds-merkle")) + testImplementation(project(":swirlds-sign-tool")) // TODO: should be removed in future + testImplementation(libs.commons.collections4) + testImplementation(libs.classgraph) + testImplementation(testLibs.bundles.junit) + testImplementation(testLibs.bundles.utils) + testImplementation(testFixtures(project(":swirlds-common"))) - testImplementation(project(":swirlds-config-impl")) + testImplementation(project(":swirlds-config-impl")) } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/SimpleEventGenerator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/SimpleEventGenerator.java index b2e33965ea14..6594cce51447 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/SimpleEventGenerator.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/SimpleEventGenerator.java @@ -17,8 +17,10 @@ package com.swirlds.platform.test; import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.AddressBook; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.test.event.RandomEventUtils; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -28,20 +30,21 @@ @Deprecated public class SimpleEventGenerator { - final int numberOfNodes; + final AddressBook addressBook; final Map lastEvent; final Random random; final Set excludeAsOtherParent; - public SimpleEventGenerator(int numberOfNodes, Random random) { - this.numberOfNodes = numberOfNodes; - lastEvent = new HashMap<>(numberOfNodes); - this.random = random; + public SimpleEventGenerator(@NonNull final AddressBook addressBook, @NonNull final Random random) { + this.addressBook = Objects.requireNonNull(addressBook); + lastEvent = new HashMap<>(addressBook.getSize()); + this.random = Objects.requireNonNull(random); excludeAsOtherParent 
= new HashSet<>(); } public EventImpl nextEvent(final boolean fakeHash) { - final NodeId nodeId = new NodeId(random.nextInt(numberOfNodes)); + final int randomIndex = random.nextInt(addressBook.getSize()); + final NodeId nodeId = addressBook.getNodeId(randomIndex); final NodeId otherId = getOtherParent(nodeId); final EventImpl event = RandomEventUtils.randomEvent( random, nodeId, lastEvent.get(nodeId), lastEvent.get(otherId), fakeHash, true); @@ -56,7 +59,8 @@ public EventImpl nextEvent() { private NodeId getOtherParent(final NodeId exclude) { NodeId otherId = exclude; while (Objects.equals(otherId, exclude) || excludeAsOtherParent.contains(otherId)) { - otherId = new NodeId(random.nextInt(numberOfNodes)); + final int randomIndex = random.nextInt(addressBook.getSize()); + otherId = addressBook.getNodeId(randomIndex); } return otherId; } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/EventDedup.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/EventDedup.java index a6378a39fc87..58bd6a14e4e6 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/EventDedup.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/EventDedup.java @@ -19,9 +19,9 @@ import com.swirlds.common.sequence.Shiftable; import com.swirlds.common.sequence.set.ConcurrentSequenceSet; import com.swirlds.common.sequence.set.SequenceSet; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.gossip.chatter.protocol.MessageHandler; import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEvent; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import java.util.List; public class EventDedup implements MessageHandler, Shiftable { diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/EventTracker.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/EventTracker.java index dfa6b68d399c..5670f1033eac 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/EventTracker.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/EventTracker.java @@ -26,8 +26,7 @@ import com.swirlds.common.sequence.map.ConcurrentSequenceMap; import com.swirlds.common.sequence.map.SequenceMap; import com.swirlds.common.system.NodeId; -import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEventDescriptor; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; +import com.swirlds.platform.event.EventDescriptor; import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Duration; import java.time.Instant; @@ -111,7 +110,7 @@ public EventTracker(final GossipSimulationBuilder builder) { * @param descriptor describes the event that was just created * @param creationTime the time when the event was created */ - public void registerNewEvent(final ChatterEventDescriptor descriptor, final Instant creationTime) { + public void registerNewEvent(final EventDescriptor descriptor, final Instant creationTime) { final TrackedEvent prev = events.put(descriptor, new TrackedEvent(descriptor.getCreator(), creationTime)); if 
(prev != null) { diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/SimulatedEvent.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/SimulatedEvent.java index 017ff86e774f..c8ceb0894d21 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/SimulatedEvent.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/chatter/simulator/SimulatedEvent.java @@ -21,8 +21,8 @@ import com.swirlds.common.io.streams.SerializableDataInputStream; import com.swirlds.common.io.streams.SerializableDataOutputStream; import com.swirlds.common.system.NodeId; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEvent; -import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEventDescriptor; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.time.Instant; @@ -40,7 +40,7 @@ private static final class ClassVersion { public static final int ORIGINAL = 1; } - private ChatterEventDescriptor descriptor; + private EventDescriptor descriptor; private byte[] data; private Instant timeReceived; @@ -72,14 +72,14 @@ public SimulatedEvent( random.nextBytes(hashBytes); final Hash hash = new Hash(hashBytes, DigestType.SHA_384); - this.descriptor = new ChatterEventDescriptor(hash, creator, round); + this.descriptor = new EventDescriptor(hash, creator, round); } /** * {@inheritDoc} */ @Override - public ChatterEventDescriptor getDescriptor() { + public EventDescriptor getDescriptor() { return descriptor; } @@ -88,6 +88,14 @@ public Instant getTimeReceived() { return timeReceived; } + /** + * {@inheritDoc} + */ + @Override + public long getGeneration() { + return descriptor.getGeneration(); + } + public void setTimeReceived(final Instant timeReceived) { this.timeReceived = timeReceived; } @@ -123,7 +131,7 @@ public void serialize(final SerializableDataOutputStream out) throws IOException */ @Override public void deserialize(final SerializableDataInputStream in, final int version) throws IOException { - descriptor = in.readSerializable(false, ChatterEventDescriptor::new); + descriptor = in.readSerializable(false, EventDescriptor::new); data = in.readByteArray(Integer.MAX_VALUE); } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/components/AccessibleEventCreator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/components/AccessibleEventCreator.java index cb9ed8c74753..751719b0335c 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/components/AccessibleEventCreator.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/components/AccessibleEventCreator.java @@ -16,6 +16,7 @@ package com.swirlds.platform.test.components; +import com.swirlds.common.context.PlatformContext; import com.swirlds.common.stream.Signer; import com.swirlds.common.system.BasicSoftwareVersion; import com.swirlds.common.system.NodeId; @@ -36,6 +37,7 @@ public class AccessibleEventCreator extends EventCreator { public AccessibleEventCreator( + final PlatformContext platformContext, final NodeId selfId, final EventMapper eventMapper, final 
Signer signer, @@ -47,6 +49,7 @@ public AccessibleEventCreator( final EventCreationRules eventCreationRules) { super( + platformContext, new BasicSoftwareVersion(1), selfId, signer, diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/ConsensusTestDefinition.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/ConsensusTestDefinition.java index 128428c85655..9ce4573b9176 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/ConsensusTestDefinition.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/ConsensusTestDefinition.java @@ -18,6 +18,7 @@ import static com.swirlds.platform.test.event.source.EventSourceFactory.newStandardEventSources; +import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.WeightGenerator; import com.swirlds.platform.test.event.TestSequence; import com.swirlds.platform.test.event.emitter.EventEmitter; @@ -27,6 +28,7 @@ import com.swirlds.platform.test.event.generator.StandardGraphGenerator; import com.swirlds.platform.test.event.source.EventSource; import com.swirlds.platform.test.event.source.WeightedGraphGenerator; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; import java.util.Objects; import java.util.function.Function; @@ -81,6 +83,9 @@ public class ConsensusTestDefinition { private EventEmitter node2EventEmitter; + /** The address book generated from the graph */ + private AddressBook addressBook; + /** The test sequences generated using the current seed. {@code null} until {@link #setSeed(long)} is called. */ private List testSequences; @@ -173,11 +178,21 @@ public void setSeed(final long seed) { System.out.println("Node Weights: " + nodeWeights); } final GraphGenerator graphGenerator = graphGeneratorProvider.getGraphGenerator(nodeWeights); + addressBook = graphGenerator.getAddressBook(); node1EventEmitter = node1EventEmitterGenerator.getEventEmitter(graphGenerator.cleanCopy(), seed); node2EventEmitter = node2EventEmitterGenerator.getEventEmitter(graphGenerator.cleanCopy(), seed); testSequences = testSequenceGenerator.apply(this); } + /** + * Returns the address book generated from the graph. Must be called after {@link #setSeed(long)}. 
+ * @return the address book generated from the graph + */ + @NonNull + public AddressBook getAddressBook() { + return addressBook; + } + public int getNumberOfNodes() { return numberOfNodes; } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/NodeContext.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/NodeContext.java index 5e1cd1c357cd..d700a9d79a83 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/NodeContext.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/NodeContext.java @@ -128,8 +128,7 @@ public void restart(final Path stateDir) throws ConstructableRegistryException, // load events from signed state into the sources final AddressBook addressBook = collectingEmitter.getGraphGenerator().getAddressBook(); for (final Address address : addressBook) { - final EventSource source = - collectingEmitter.getGraphGenerator().getSource(addressBook.getIndexOfNodeId(address.getNodeId())); + final EventSource source = collectingEmitter.getGraphGenerator().getSource(address.getNodeId()); final List eventsByCreator = Arrays.stream(indexedEvents) .filter(e -> Objects.equals(e.getCreatorId(), address.getNodeId())) .toList(); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/EventEmitterFactory.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/EventEmitterFactory.java index 52ea05121363..0bdc852debd1 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/EventEmitterFactory.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/EventEmitterFactory.java @@ -16,12 +16,15 @@ package com.swirlds.platform.test.event.emitter; +import com.swirlds.common.system.address.AddressBook; import com.swirlds.platform.test.event.generator.StandardGraphGenerator; import com.swirlds.platform.test.event.source.EventSource; import com.swirlds.platform.test.event.source.EventSourceFactory; import com.swirlds.platform.test.event.source.ForkingEventSource; import com.swirlds.platform.test.event.source.StandardEventSource; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; +import java.util.Objects; import java.util.Random; /** @@ -29,8 +32,10 @@ */ public class EventEmitterFactory { + /** the random number generator to use */ private final Random random; - private final int numNetworkNodes; + /** the address book to use */ + private final AddressBook addressBook; /** * Seed used for the standard generator. Must be same for all instances to ensure the same events are * generated for different instances. 
Differences in the graphs are managed in other ways and are defined in each @@ -40,11 +45,11 @@ public class EventEmitterFactory { private final EventSourceFactory sourceFactory; - public EventEmitterFactory(final Random random, final int numNetworkNodes) { - this.random = random; - this.numNetworkNodes = numNetworkNodes; + public EventEmitterFactory(@NonNull final Random random, @NonNull final AddressBook addressBook) { + this.random = Objects.requireNonNull(random); + this.addressBook = Objects.requireNonNull(addressBook); this.commonSeed = random.nextLong(); - this.sourceFactory = new EventSourceFactory(numNetworkNodes); + this.sourceFactory = new EventSourceFactory(addressBook); } /** @@ -68,6 +73,7 @@ public StandardEventEmitter newStandardEmitter() { * @return the new {@link ShuffledEventEmitter} */ public ShuffledEventEmitter newForkingShuffledGenerator() { + final int numNetworkNodes = addressBook.getSize(); // No more than 1/3 of the nodes can create forks for consensus to be successful final int maxNumForkingSources = (int) Math.floor(numNetworkNodes / 3.0); @@ -85,9 +91,16 @@ public StandardEventEmitter newStandardFromSourceFactory() { } private StandardGraphGenerator newStandardGraphGenerator(final List> eventSources) { - return new StandardGraphGenerator( - commonSeed, // standard seed must be the same across all generators - eventSources); + if (addressBook == null) { + return new StandardGraphGenerator( + commonSeed, // standard seed must be the same across all generators + eventSources); + } else { + return new StandardGraphGenerator( + commonSeed, // standard seed must be the same across all generators + eventSources, + addressBook); + } } private ShuffledEventEmitter newShuffledEmitter(final List> eventSources) { diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/StandardEventEmitter.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/StandardEventEmitter.java index d8aecfa452e3..1795dad6442f 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/StandardEventEmitter.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/emitter/StandardEventEmitter.java @@ -26,10 +26,12 @@ public class StandardEventEmitter extends AbstractEventEmitter graphGenerator) { super(graphGenerator); + reset(); } public StandardEventEmitter(final StandardEventEmitter that) { this(that.getGraphGenerator().cleanCopy()); + reset(); } /** diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/AbstractGraphGenerator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/AbstractGraphGenerator.java index d9e5d90fc088..61c103760be8 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/AbstractGraphGenerator.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/AbstractGraphGenerator.java @@ -16,9 +16,11 @@ package com.swirlds.platform.test.event.generator; +import com.swirlds.common.system.NodeId; import com.swirlds.platform.consensus.GraphGenerations; import com.swirlds.platform.event.EventConstants; import com.swirlds.platform.test.event.IndexedEvent; 
+import edu.umd.cs.findbugs.annotations.Nullable; import java.util.HashMap; import java.util.Map; import java.util.Random; @@ -45,7 +47,7 @@ public abstract class AbstractGraphGenerator private Random random; /** A map that holds the maximum event generation for each creator */ - private final Map maxGenerationPerCreator; + private final Map maxGenerationPerCreator; protected AbstractGraphGenerator(final long initialSeed) { this.initialSeed = initialSeed; @@ -115,14 +117,14 @@ public final long getInitialSeed() { * Updates the max generation based on the latest event */ private void updateMaxGeneration(final IndexedEvent event) { - maxGenerationPerCreator.merge(event.getCreatorId().id(), event.getGeneration(), Math::max); + maxGenerationPerCreator.merge(event.getCreatorId(), event.getGeneration(), Math::max); } /** * {@inheritDoc} */ @Override - public long getMaxGeneration(final long creatorId) { + public long getMaxGeneration(@Nullable final NodeId creatorId) { return maxGenerationPerCreator.getOrDefault(creatorId, EventConstants.GENERATION_UNDEFINED); } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/GraphGenerator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/GraphGenerator.java index 4ff114a803fb..e66b6b5f8f65 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/GraphGenerator.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/GraphGenerator.java @@ -16,10 +16,13 @@ package com.swirlds.platform.test.event.generator; +import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.platform.test.event.DynamicValue; import com.swirlds.platform.test.event.IndexedEvent; import com.swirlds.platform.test.event.source.EventSource; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; import java.util.List; @@ -44,7 +47,7 @@ public interface GraphGenerator> { /** * Get the event source for a particular node ID. */ - EventSource getSource(final int nodeID); + EventSource getSource(@NonNull final NodeId nodeID); /** * Get an exact copy of this event generator in its current state. The events returned by this @@ -113,6 +116,7 @@ default List generateEvents(final int numberOfEvents) { /** * Get an address book that represents the collection of nodes that are generating the events. */ + @NonNull AddressBook getAddressBook(); /** @@ -122,7 +126,7 @@ default List generateEvents(final int numberOfEvents) { * the event creator * @return the maximum event generation for the supplied creator */ - long getMaxGeneration(long creatorId); + long getMaxGeneration(@Nullable final NodeId creatorId); /** * Returns the maximum generation of all events created by this generator @@ -134,8 +138,9 @@ default List generateEvents(final int numberOfEvents) { * * @param affinityMatrix * An n by n matrix where n is the number of event sources. Each row defines the preference of a particular - * node when choosing other parents. Node 0 is described by the first row, node 1 by the next, etc. - * Each entry should be a weight. Weights of self (i.e. the weights on the diagonal) should be 0. + * node when choosing other parents. 
The node at index 0 in the address book is described by the first row, + * the node at index 1 in the address book by the next row, etc. Each entry should be a weight. Weights of + * self (i.e. the weights on the diagonal) should be 0. */ void setOtherParentAffinity(final List> affinityMatrix); @@ -143,7 +148,9 @@ default List generateEvents(final int numberOfEvents) { * Set the affinity of each node for choosing the parents of its events. * * @param affinityMatrix - * A dynamic n by n matrix where n is the number of event sources. Each entry should be a weight. + * A dynamic n by n matrix where n is the number of event sources. Each row defines the preference of a + * particular node when choosing other parents. The node at index 0 in the address book is described by + * the first row, the node at index 1 in the address book by the next row, etc. Each entry should be a weight. * Weights of self (i.e. the weights on the diagonal) should be 0. */ void setOtherParentAffinity(final DynamicValue>> affinityMatrix); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/StandardGraphGenerator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/StandardGraphGenerator.java index 9e3a0de4f102..cd6c1ff50482 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/StandardGraphGenerator.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/generator/StandardGraphGenerator.java @@ -27,12 +27,12 @@ import com.swirlds.platform.test.event.DynamicValueGenerator; import com.swirlds.platform.test.event.IndexedEvent; import com.swirlds.platform.test.event.source.EventSource; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.List; -import java.util.Map; +import java.util.Objects; /** * A utility class for generating a graph of events. @@ -98,21 +98,42 @@ public StandardGraphGenerator(final long seed, final EventSource... eventSour * @param eventSources * One or more event sources. */ - public StandardGraphGenerator(final long seed, final List> eventSources) { + public StandardGraphGenerator(final long seed, @NonNull final List> eventSources) { super(seed); + Objects.requireNonNull(eventSources); this.sources = eventSources; if (eventSources.isEmpty()) { throw new IllegalArgumentException("At least one event source is required"); } - for (int index = 0; index < eventSources.size(); index++) { - final EventSource source = eventSources.get(index); - source.setNodeId(new NodeId(index)); + buildAddressBookInitializeEventSources(eventSources); + buildDefaultOtherParentAffinityMatrix(); + } + + /** + * Construct a new StandardEventGenerator. + * + * @param seed + * The random seed used to generate events. + * @param eventSources + * One or more event sources. + * @param addressBook + * The address book to use with the event sources. 
+ */ + public StandardGraphGenerator( + final long seed, @NonNull final List> eventSources, @NonNull final AddressBook addressBook) { + super(seed); + Objects.requireNonNull(eventSources); + Objects.requireNonNull(addressBook); + + this.sources = eventSources; + if (eventSources.isEmpty()) { + throw new IllegalArgumentException("At least one event source is required"); } + setAddressBookInitializeEventSources(eventSources, addressBook); buildDefaultOtherParentAffinityMatrix(); - buildAddressBook(eventSources); } /** @@ -134,24 +155,45 @@ private StandardGraphGenerator(final StandardGraphGenerator that, final long see final EventSource copy = sourceToCopy.copy(); this.sources.add(copy); } + this.addressBook = that.getAddressBook().copy(); this.eventPeriodMean = that.eventPeriodMean; this.eventPeriodStandardDeviation = that.eventPeriodStandardDeviation; this.simultaneousEventFraction = that.simultaneousEventFraction; - buildAddressBook(this.sources); } - private void buildAddressBook(final List> eventSources) { - final Map weightMap = new HashMap<>(); - for (final EventSource eventSource : eventSources) { - weightMap.put(eventSource.getNodeId(), eventSource.getWeight()); - } + /** + * Builds a random address book, updates the weights of the addresses from the event sources, and initializes the node ids of the event sources from the addresses. + * + * @param eventSources the event sources to initialize. + */ + private void buildAddressBookInitializeEventSources(@NonNull final List> eventSources) { + final int eventSourceCount = eventSources.size(); - addressBook = new RandomAddressBookGenerator(getRandom()) - .setNodeIds(weightMap.keySet()) - .setCustomWeightGenerator(weightMap::get) + final AddressBook addressBook = new RandomAddressBookGenerator(getRandom()) + .setSize(eventSourceCount) .setHashStrategy(RandomAddressBookGenerator.HashStrategy.FAKE_HASH) - .setSequentialIds(false) .build(); + setAddressBookInitializeEventSources(eventSources, addressBook); + } + + /** + * Sets the address book, updates the weights of the addresses from the event sources, and initializes the node ids of + * the event sources from the addresses. + * + * @param eventSources the event sources to initialize. + * @param addressBook the address book to use.
+ */ + private void setAddressBookInitializeEventSources( + @NonNull final List> eventSources, @NonNull final AddressBook addressBook) { + final int eventSourceCount = eventSources.size(); + + this.addressBook = addressBook; + for (int index = 0; index < eventSourceCount; index++) { + final EventSource source = eventSources.get(index); + final NodeId nodeId = addressBook.getNodeId(index); + addressBook.updateWeight(nodeId, source.getWeight()); + source.setNodeId(nodeId); + } } /** @@ -192,10 +234,12 @@ private List getOtherParentAffinityVector(final long eventIndex, final i private void buildDefaultOtherParentAffinityMatrix() { final List> matrix = new ArrayList<>(sources.size()); - for (int nodeId = 0; nodeId < sources.size(); nodeId++) { + for (int nodeIndex = 0; nodeIndex < sources.size(); nodeIndex++) { + final NodeId nodeId = addressBook.getNodeId(nodeIndex); final List affinityVector = new ArrayList<>(sources.size()); - for (int index = 0; index < sources.size(); index++) { - if (index == nodeId) { + for (int otherNodeIndex = 0; otherNodeIndex < sources.size(); otherNodeIndex++) { + final NodeId otherNodeId = addressBook.getNodeId(otherNodeIndex); + if (Objects.equals(nodeId, otherNodeId)) { affinityVector.add(0.0); } else { affinityVector.add(1.0); @@ -282,8 +326,17 @@ public int getNumberOfSources() { * {@inheritDoc} */ @Override - public EventSource getSource(final int nodeID) { - return sources.get(nodeID); + public EventSource getSource(final NodeId nodeID) { + final int nodeIndex = addressBook.getIndexOfNodeId(nodeID); + return sources.get(nodeIndex); + } + /** + * Get the event source for a particular node index. + * + * @return the event source + */ + public EventSource getSourceByIndex(final int nodeIndex) { + return sources.get(nodeIndex); } /** @@ -329,15 +382,14 @@ protected void resetInternalData() { source.reset(); } previousTimestamp = null; - buildAddressBook(sources); } /** * Get the next node that is creating an event. 
*/ private EventSource getNextEventSource(final long eventIndex) { - final int nodeID = weightedChoice(getRandom(), getSourceWeights(eventIndex)); - return sources.get(nodeID); + final int nodeIndex = weightedChoice(getRandom(), getSourceWeights(eventIndex)); + return sources.get(nodeIndex); } /** @@ -349,8 +401,8 @@ private EventSource getNextEventSource(final long eventIndex) { private EventSource getNextOtherParentSource(final long eventIndex, final EventSource source) { final List affinityVector = getOtherParentAffinityVector(eventIndex, addressBook.getIndexOfNodeId(source.getNodeId())); - final int nodeID = weightedChoice(getRandom(), affinityVector); - return sources.get(nodeID); + final int nodeIndex = weightedChoice(getRandom(), affinityVector); + return sources.get(nodeIndex); } /** diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/source/EventSourceFactory.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/source/EventSourceFactory.java index ab3240578452..ab08ab1109f8 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/source/EventSourceFactory.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/event/source/EventSourceFactory.java @@ -16,9 +16,12 @@ package com.swirlds.platform.test.event.source; +import com.swirlds.common.system.address.AddressBook; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; +import java.util.Objects; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.IntStream; @@ -26,14 +29,15 @@ public class EventSourceFactory { - private final int numNetworkNodes; + /** the address book to use */ + private final AddressBook addressBook; /** * a list of lambdas that supply a custom event source for some indexes */ private final List, Supplier>>> customSources; - public EventSourceFactory(final int numNetworkNodes) { - this.numNetworkNodes = numNetworkNodes; + public EventSourceFactory(@NonNull final AddressBook addressBook) { + this.addressBook = Objects.requireNonNull(addressBook); this.customSources = new LinkedList<>(); } @@ -55,8 +59,9 @@ public void addCustomSource(final Predicate indexPredicate, final Supplier */ public List> generateSources() { final List> list = new LinkedList<>(); + final int numNodes = addressBook.getSize(); forEachNode: - for (long i = 0; i < numNetworkNodes; i++) { + for (long i = 0; i < numNodes; i++) { for (final Pair, Supplier>> customSource : customSources) { if (customSource.getLeft().test(i)) { list.add(customSource.getRight().get()); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java index da3a6c75b4e7..90caf874f8bc 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java @@ -19,6 +19,7 @@ import static 
com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager; import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.Cryptography; import com.swirlds.common.crypto.Hash; import com.swirlds.common.crypto.SerializableHashable; @@ -46,6 +47,7 @@ import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.test.simulated.config.NodeConfig; import com.swirlds.test.framework.config.TestConfigBuilder; +import com.swirlds.test.framework.context.TestPlatformContextBuilder; import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; import java.util.List; @@ -103,12 +105,19 @@ public SimulatedEventCreationNode( this.eventByHash = Objects.requireNonNull(eventByHash, "the event by hash function is null"); this.config = Objects.requireNonNull(config, "the node config is null"); - final Configuration configuration = new TestConfigBuilder().getOrCreateConfig(); + final Configuration configuration = new TestConfigBuilder() + .withValue("event.creation.useTipsetAlgorithm", "false") + .getOrCreateConfig(); + final ChatterConfig chatterConfig = configuration.getConfigData(ChatterConfig.class); final ThreadConfig threadConfig = configuration.getConfigData(ThreadConfig.class); criticalQuorum = new CriticalQuorumImpl( - new NoOpMetrics(), new NodeId(0), addressBook, false, chatterConfig.criticalQuorumSoftening()); + new NoOpMetrics(), + addressBook.getNodeId(0), + addressBook, + false, + chatterConfig.criticalQuorumSoftening()); final OtherParentTracker otherParentTracker = new OtherParentTracker(); final LoggingEventCreationRules eventCreationRules = LoggingEventCreationRules.create( @@ -122,7 +131,13 @@ public SimulatedEventCreationNode( invocation.getArgument(0, SerializableHashable.class).setHash(hash); return hash; }); + + final PlatformContext platformContext = TestPlatformContextBuilder.create() + .withConfiguration(configuration) + .build(); + chatterEventCreator = new ChatterEventCreator( + platformContext, softwareVersion, nodeId, new RandomSigner(random), diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/AddressBookStoreTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/AddressBookStoreTests.java index c7b0926a2625..4f2d34bcb45d 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/AddressBookStoreTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/AddressBookStoreTests.java @@ -542,17 +542,14 @@ void validationTest(final AddressBookStoreImpl addressBookStoreImpl) throws Inte IllegalStateException.class, () -> store.add(addressBook), "invalid genesis book should be rejected"); // Add an address book that has a high next ID - final RandomAddressBookGenerator generator = - new RandomAddressBookGenerator().setSize(100).setSequentialIds(false); + final RandomAddressBookGenerator generator = new RandomAddressBookGenerator().setSize(100); final AddressBook firstAddressBook = generator.build().setRound(0); store.add(firstAddressBook); assertEquals(1, store.getSize(), "store is the wrong size"); // Attempting to add an address book store with a low next ID should fail - final RandomAddressBookGenerator invalidGenerator = - new RandomAddressBookGenerator().setSize(10).setSequentialIds(false); - final AddressBook invalidBook = - 
invalidGenerator.setSequentialIds(false).build().setRound(1); + final RandomAddressBookGenerator invalidGenerator = new RandomAddressBookGenerator().setSize(10); + final AddressBook invalidBook = invalidGenerator.build().setRound(1); store.add(invalidBook); assertEquals(2, store.getSize(), "store is the wrong size"); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/SerializationTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/SerializationTests.java index 6defd966445c..0580b771ee08 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/SerializationTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/SerializationTests.java @@ -25,7 +25,6 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.test.TransactionUtils; import com.swirlds.common.test.io.SerializationUtils; -import com.swirlds.platform.Settings; import com.swirlds.platform.test.event.RandomEventUtils; import com.swirlds.test.framework.TestTypeTags; import com.swirlds.test.framework.config.TestConfigBuilder; @@ -44,7 +43,6 @@ public class SerializationTests { public static void setUp() throws ConstructableRegistryException { new TestConfigBuilder().withValue("transactionMaxBytes", 1_000_000).getOrCreateConfig(); - Settings.populateSettingsCommon(); ConstructableRegistry.getInstance().registerConstructables("com.swirlds"); } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/NetworkTestChatter.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/NetworkTestChatter.java index 7203482bc15e..dc72b6e6abac 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/NetworkTestChatter.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/NetworkTestChatter.java @@ -20,12 +20,12 @@ import com.swirlds.common.system.NodeId; import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEvent; import com.swirlds.platform.test.chatter.simulator.SimulatedEvent; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Duration; import java.time.Instant; import java.util.Objects; import java.util.Random; -import org.checkerframework.checker.nullness.qual.NonNull; -import org.checkerframework.checker.nullness.qual.Nullable; public class NetworkTestChatter implements SimulatedChatter { private final NodeId selfId; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/ChatterSimulationTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/ChatterSimulationTests.java index d9456862f4ba..19448649e12b 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/ChatterSimulationTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/ChatterSimulationTests.java @@ -37,11 +37,11 @@ import com.swirlds.platform.test.simulated.config.NetworkConfig; import 
com.swirlds.platform.test.simulated.config.NodeConfig; import com.swirlds.platform.test.simulated.config.NodeConfigBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Duration; import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicBoolean; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingChatterEvent.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingChatterEvent.java index dd89b1058928..a6875b1214af 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingChatterEvent.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingChatterEvent.java @@ -19,12 +19,12 @@ import com.swirlds.common.io.streams.SerializableDataInputStream; import com.swirlds.common.io.streams.SerializableDataOutputStream; import com.swirlds.common.system.NodeId; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.test.chatter.network.framework.SimulatedChatterEvent; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.time.Instant; import java.util.Objects; -import org.checkerframework.checker.nullness.qual.NonNull; /** * A very simple, fake event that is easy to track and reason about. Each event should have a number which is one more diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingEventDescriptor.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingEventDescriptor.java index 63d010a1246d..d29a907b9cb3 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingEventDescriptor.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/CountingEventDescriptor.java @@ -16,19 +16,21 @@ package com.swirlds.platform.test.chatter.network; +import static org.mockito.Mockito.mock; + import com.swirlds.common.crypto.Hash; import com.swirlds.common.io.streams.SerializableDataInputStream; import com.swirlds.common.io.streams.SerializableDataOutputStream; import com.swirlds.common.system.NodeId; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; +import com.swirlds.platform.event.EventDescriptor; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.util.Objects; -import org.checkerframework.checker.nullness.qual.NonNull; /** * A descriptor for a {@link CountingChatterEvent} */ -public class CountingEventDescriptor implements EventDescriptor { +public class CountingEventDescriptor extends EventDescriptor { private static final long CLASS_ID = 0x281cc80fd18964f0L; private static final class ClassVersion { @@ -71,12 +73,12 @@ public void serialize(final SerializableDataOutputStream out) throws IOException public void deserialize(final SerializableDataInputStream in, final int 
version) throws IOException {} @Override - public Hash getHash() { - return null; + public @NonNull Hash getHash() { + return mock(Hash.class); } @Override - public NodeId getCreator() { + public @NonNull NodeId getCreator() { return creator; } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/EventDeduper.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/EventDeduper.java index a5a0f5985277..f8cccb7fa7da 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/EventDeduper.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/EventDeduper.java @@ -17,8 +17,8 @@ package com.swirlds.platform.test.chatter.network; import com.swirlds.common.system.NodeId; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.gossip.chatter.protocol.ChatterCore; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import com.swirlds.platform.test.chatter.network.framework.AbstractSimulatedEventPipeline; import com.swirlds.platform.test.chatter.network.framework.SimulatedChatterEvent; import java.util.HashSet; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/InOrderOrphanBuffer.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/InOrderOrphanBuffer.java index c1d746d2ec93..8994ddcdde62 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/InOrderOrphanBuffer.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/InOrderOrphanBuffer.java @@ -19,6 +19,7 @@ import com.swirlds.common.system.NodeId; import com.swirlds.platform.gossip.chatter.protocol.ChatterCore; import com.swirlds.platform.test.chatter.network.framework.AbstractSimulatedEventPipeline; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -27,7 +28,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; -import org.checkerframework.checker.nullness.qual.NonNull; /** * Mimics an orphan buffer using {@link CountingChatterEvent}. 
It buffers the events and only allows an event to emitted diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/heartbeat/PingChecker.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/heartbeat/PingChecker.java index 4e968e5dbdc5..c821876545b7 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/heartbeat/PingChecker.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/heartbeat/PingChecker.java @@ -17,9 +17,9 @@ package com.swirlds.platform.test.chatter.protocol.heartbeat; import com.swirlds.common.system.NodeId; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Duration; import java.util.Objects; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.Assertions; /** diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/messages/ChatterEventDescriptorTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/messages/EventDescriptorTest.java similarity index 68% rename from platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/messages/ChatterEventDescriptorTest.java rename to platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/messages/EventDescriptorTest.java index 7f2882062690..139f1c572f7b 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/messages/ChatterEventDescriptorTest.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/protocol/messages/EventDescriptorTest.java @@ -27,31 +27,27 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.test.RandomUtils; import com.swirlds.common.test.io.SerializationUtils; -import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEventDescriptor; +import com.swirlds.platform.event.EventDescriptor; import java.io.IOException; import org.junit.jupiter.api.Test; -class ChatterEventDescriptorTest { +class EventDescriptorTest { @Test void testSerialization() throws IOException, ConstructableRegistryException { - final ChatterEventDescriptor descriptor = - new ChatterEventDescriptor(RandomUtils.randomHash(), new NodeId(1), 123); + final EventDescriptor descriptor = new EventDescriptor(RandomUtils.randomHash(), new NodeId(1), 123); ConstructableRegistry.getInstance() - .registerConstructable( - new ClassConstructorPair(ChatterEventDescriptor.class, ChatterEventDescriptor::new)); - final ChatterEventDescriptor copy = SerializationUtils.serializeDeserialize(descriptor); + .registerConstructable(new ClassConstructorPair(EventDescriptor.class, EventDescriptor::new)); + final EventDescriptor copy = SerializationUtils.serializeDeserialize(descriptor); assertEquals(descriptor, copy, "deserialized version should be the same"); assertThrows( - Exception.class, - () -> new ChatterEventDescriptor(null, new NodeId(0), 0), - "we should not permit a null hash"); + Exception.class, () -> new EventDescriptor(null, new NodeId(0), 0), "we should not permit a null hash"); } @Test void testEquals() { - 
final ChatterEventDescriptor d1 = new ChatterEventDescriptor(RandomUtils.randomHash(), new NodeId(1), 123); - final ChatterEventDescriptor d2 = new ChatterEventDescriptor(RandomUtils.randomHash(), new NodeId(2), 234); + final EventDescriptor d1 = new EventDescriptor(RandomUtils.randomHash(), new NodeId(1), 123); + final EventDescriptor d2 = new EventDescriptor(RandomUtils.randomHash(), new NodeId(2), 234); assertTrue(d1.equals(d1), "should be equal to itself"); assertFalse(d1.equals(null), "should not be equal to null"); assertFalse(d1.equals(new Object()), "should not be equal to a different class"); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventCreatorTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventCreatorTests.java index 56d06d4f5281..fee2b4af294c 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventCreatorTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventCreatorTests.java @@ -22,12 +22,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.Signature; import com.swirlds.common.crypto.SignatureType; import com.swirlds.common.stream.Signer; @@ -38,20 +38,21 @@ import com.swirlds.common.system.events.PlatformEvent; import com.swirlds.common.system.transaction.Transaction; import com.swirlds.common.system.transaction.internal.SwirldTransaction; +import com.swirlds.config.api.Configuration; import com.swirlds.platform.components.EventCreationRules; import com.swirlds.platform.components.EventHandler; import com.swirlds.platform.components.EventMapper; import com.swirlds.platform.components.transaction.TransactionPool; import com.swirlds.platform.components.transaction.TransactionSupplier; import com.swirlds.platform.consensus.GraphGenerations; -import com.swirlds.platform.event.EventConstants; -import com.swirlds.platform.event.EventUtils; import com.swirlds.platform.event.creation.AncientParentsRule; import com.swirlds.platform.gossip.shadowgraph.Generations; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.test.event.EventMocks; import com.swirlds.test.framework.TestComponentTags; import com.swirlds.test.framework.TestTypeTags; +import com.swirlds.test.framework.config.TestConfigBuilder; +import com.swirlds.test.framework.context.TestPlatformContextBuilder; import java.time.Instant; import java.util.HashMap; import java.util.HashSet; @@ -187,96 +188,6 @@ public boolean isAncient(final BaseEvent event) { } } - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @Tag(TestComponentTags.PLATFORM) - @DisplayName("getTimeCreated() Test") - void getTimeCreatedTest() { - final Instant now = Instant.now(); - - assertEquals( - now, EventUtils.getChildTimeCreated(now, null), "time should not be increased for null self parent"); - - final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); - 
- Mockito.when(hashedData.getTimeCreated()).thenReturn(now.minusNanos(1000)); - - assertEquals( - now, - EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), - "time should not be increased if parent is in the past with no transactions"); - - Mockito.when(hashedData.getTransactions()).thenReturn(new SwirldTransaction[100]); - assertEquals( - now, - EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), - "time should not be increased if parent is in the past with a few transactions"); - - Mockito.when(hashedData.getTransactions()).thenReturn(new SwirldTransaction[2000]); - assertEquals( - now.plusNanos(1000), - EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), - "time should be increased so that 1 nanosecond passes per previous transaction"); - - Mockito.when(hashedData.getTransactions()).thenReturn(new SwirldTransaction[0]); - Mockito.when(hashedData.getTimeCreated()).thenReturn(now); - assertEquals( - now.plusNanos(1), - EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), - "time should be increased so that 1 nanosecond since event with 0 transactions"); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @Tag(TestComponentTags.PLATFORM) - @DisplayName("getEventGeneration() Test") - void getEventGenerationTest() { - assertEquals(-1, EventUtils.getEventGeneration(null), "generation of a null event should equal -1"); - - final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); - Mockito.when(hashedData.getGeneration()).thenReturn(1234L); - - assertEquals( - hashedData.getGeneration(), - EventUtils.getEventGeneration(EventMocks.mockEvent(hashedData)), - "should return generation of non-null event"); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @Tag(TestComponentTags.PLATFORM) - @DisplayName("getEventHash() Test") - void getEventHashTest() { - assertNull(EventUtils.getEventHash(null), "hash of null event should be null"); - - final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); - Mockito.when(hashedData.getHash()).thenReturn(randomHash()); - - assertSame( - hashedData.getHash().getValue(), - EventUtils.getEventHash(EventMocks.mockEvent(hashedData)), - "should return the hash of a non-null event"); - } - - @Test - @Tag(TestTypeTags.FUNCTIONAL) - @Tag(TestComponentTags.PLATFORM) - @DisplayName("getOtherParentCreatorId() Test") - void getOtherParentCreatorIdTest() { - assertEquals( - EventConstants.CREATOR_ID_UNDEFINED, - EventUtils.getCreatorId(null), - "null event should have creator ID = CREATOR_ID_UNDEFINED"); - - final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); - Mockito.when(hashedData.getCreatorId()).thenReturn(new NodeId(4321L)); - - assertEquals( - hashedData.getCreatorId(), - EventUtils.getCreatorId(EventMocks.mockEvent(hashedData)), - "should have returned creator id of event"); - } - @Test @Tag(TestTypeTags.FUNCTIONAL) @Tag(TestComponentTags.PLATFORM) @@ -294,7 +205,16 @@ void transactionsArePutIntoEventsTest() { recentEvents.put(0L, parent); final Queue events = new LinkedList<>(); + final Configuration configuration = new TestConfigBuilder() + .withValue("event.creation.useTipsetAlgorithm", "false") + .getOrCreateConfig(); + + final PlatformContext platformContext = TestPlatformContextBuilder.create() + .withConfiguration(configuration) + .build(); + final AccessibleEventCreator eventCreator = new AccessibleEventCreator( + platformContext, selfId, mockMapper(recentEvents, null), noOpSigner, @@ -348,7 +268,16 @@ void verifyEventDataTest() { 
recentEvents.put(selfId.id(), selfParentImpl); recentEvents.put(1L, otherParent); + final Configuration configuration = new TestConfigBuilder() + .withValue("event.creation.useTipsetAlgorithm", "false") + .getOrCreateConfig(); + + final PlatformContext platformContext = TestPlatformContextBuilder.create() + .withConfiguration(configuration) + .build(); + final AccessibleEventCreator eventCreator = new AccessibleEventCreator( + platformContext, selfId, mockMapper(recentEvents, selfParentImpl), noOpSigner, @@ -410,7 +339,16 @@ void sequenceOfEventsTest() { Mockito.when(mapper.getMostRecentSelfEvent()).thenReturn(selfParent); Mockito.when(mapper.getMostRecentEvent(new NodeId(0L))).thenReturn(otherParent); + final Configuration configuration = new TestConfigBuilder() + .withValue("event.creation.useTipsetAlgorithm", "false") + .getOrCreateConfig(); + + final PlatformContext platformContext = TestPlatformContextBuilder.create() + .withConfiguration(configuration) + .build(); + final AccessibleEventCreator eventCreator = new AccessibleEventCreator( + platformContext, selfId, mapper, noOpSigner, @@ -452,7 +390,16 @@ void testThrottle() { recentEvents.put(0L, parent); final Queue events = new LinkedList<>(); + final Configuration configuration = new TestConfigBuilder() + .withValue("event.creation.useTipsetAlgorithm", "false") + .getOrCreateConfig(); + + final PlatformContext platformContext = TestPlatformContextBuilder.create() + .withConfiguration(configuration) + .build(); + final AccessibleEventCreator eventCreator = new AccessibleEventCreator( + platformContext, selfId, mockMapper(recentEvents, null), noOpSigner, diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java index 9f1a68f23685..f22f979a941d 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java @@ -45,9 +45,9 @@ import com.swirlds.test.framework.TestComponentTags; import com.swirlds.test.framework.TestTypeTags; import com.swirlds.test.framework.config.TestConfigBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Random; import java.util.concurrent.BlockingQueue; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventUtilsTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventUtilsTests.java new file mode 100644 index 000000000000..1fb8f6f863f2 --- /dev/null +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventUtilsTests.java @@ -0,0 +1,163 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.test.components; + +import static com.swirlds.common.test.RandomUtils.randomHash; +import static com.swirlds.platform.event.EventUtils.calculateNewEventCreationTime; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.mockito.Mockito.mock; + +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.events.BaseEventHashedData; +import com.swirlds.common.system.transaction.internal.SwirldTransaction; +import com.swirlds.platform.event.EventConstants; +import com.swirlds.platform.event.EventUtils; +import com.swirlds.platform.test.event.EventMocks; +import com.swirlds.test.framework.TestComponentTags; +import com.swirlds.test.framework.TestTypeTags; +import java.time.Instant; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +@DisplayName("EventUtils Tests") +class EventUtilsTests { + + @Test + @Tag(TestTypeTags.FUNCTIONAL) + @Tag(TestComponentTags.PLATFORM) + @DisplayName("getTimeCreated() Test") + void getTimeCreatedTest() { + final Instant now = Instant.now(); + + assertEquals( + now, EventUtils.getChildTimeCreated(now, null), "time should not be increased for null self parent"); + + final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); + + Mockito.when(hashedData.getTimeCreated()).thenReturn(now.minusNanos(1000)); + + assertEquals( + now, + EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), + "time should not be increased if parent is in the past with no transactions"); + + Mockito.when(hashedData.getTransactions()).thenReturn(new SwirldTransaction[100]); + assertEquals( + now, + EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), + "time should not be increased if parent is in the past with a few transactions"); + + Mockito.when(hashedData.getTransactions()).thenReturn(new SwirldTransaction[2000]); + assertEquals( + now.plusNanos(1000), + EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), + "time should be increased so that 1 nanosecond passes per previous transaction"); + + Mockito.when(hashedData.getTransactions()).thenReturn(new SwirldTransaction[0]); + Mockito.when(hashedData.getTimeCreated()).thenReturn(now); + assertEquals( + now.plusNanos(1), + EventUtils.getChildTimeCreated(now, EventMocks.mockEvent(hashedData)), + "time should be increased so that 1 nanosecond since event with 0 transactions"); + } + + @Test + @Tag(TestTypeTags.FUNCTIONAL) + @Tag(TestComponentTags.PLATFORM) + @DisplayName("getEventGeneration() Test") + void getEventGenerationTest() { + assertEquals(-1, EventUtils.getEventGeneration(null), "generation of a null event should equal -1"); + + final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); + Mockito.when(hashedData.getGeneration()).thenReturn(1234L); + + assertEquals( + hashedData.getGeneration(), + 
EventUtils.getEventGeneration(EventMocks.mockEvent(hashedData)), + "should return generation of non-null event"); + } + + @Test + @Tag(TestTypeTags.FUNCTIONAL) + @Tag(TestComponentTags.PLATFORM) + @DisplayName("getEventHash() Test") + void getEventHashTest() { + assertNull(EventUtils.getEventHash(null), "hash of null event should be null"); + + final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); + Mockito.when(hashedData.getHash()).thenReturn(randomHash()); + + assertSame( + hashedData.getHash().getValue(), + EventUtils.getEventHash(EventMocks.mockEvent(hashedData)), + "should return the hash of a non-null event"); + } + + @Test + @Tag(TestTypeTags.FUNCTIONAL) + @Tag(TestComponentTags.PLATFORM) + @DisplayName("getOtherParentCreatorId() Test") + void getOtherParentCreatorIdTest() { + assertEquals( + EventConstants.CREATOR_ID_UNDEFINED, + EventUtils.getCreatorId(null), + "null event should have creator ID = CREATOR_ID_UNDEFINED"); + + final BaseEventHashedData hashedData = mock(BaseEventHashedData.class); + Mockito.when(hashedData.getCreatorId()).thenReturn(new NodeId(4321L)); + + assertEquals( + hashedData.getCreatorId(), + EventUtils.getCreatorId(EventMocks.mockEvent(hashedData)), + "should have returned creator id of event"); + } + + @Test + @DisplayName("calculateNewEventCreationTime Test()") + void calculateNewEventCreationTimeTest() { + final Instant parentTime = Instant.now(); + + // now is after minimum time, no transactions + final Instant now1 = parentTime.plusNanos(10); + final Instant calculatedTime1 = calculateNewEventCreationTime(now1, parentTime, 0); + assertEquals(now1, calculatedTime1); + + // now is after minimum time with transactions + final Instant now2 = parentTime.plusNanos(10); + final Instant calculatedTime2 = calculateNewEventCreationTime(now2, parentTime, 5); + assertEquals(now2, calculatedTime2); + + // now is before minimum time, no transactions + final Instant now3 = parentTime.minusNanos(10); + final Instant calculatedTime3 = calculateNewEventCreationTime(now3, parentTime, 0); + assertEquals(parentTime.plusNanos(1), calculatedTime3); + + // now is before minimum time because of transactions + final Instant now4 = parentTime.plusNanos(10); + final Instant calculatedTime4 = calculateNewEventCreationTime(now4, parentTime, 20); + assertEquals(parentTime.plusNanos(20), calculatedTime4); + + // exact time no transactions + final Instant now5 = parentTime; + final Instant calculatedTime5 = calculateNewEventCreationTime(now5, parentTime, 0); + assertEquals(parentTime.plusNanos(1), calculatedTime5); + } +} diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/ConsensusTestDefinitions.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/ConsensusTestDefinitions.java index 65c6c304ae39..e7de103ca999 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/ConsensusTestDefinitions.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/ConsensusTestDefinitions.java @@ -132,7 +132,8 @@ private static StandardGraphGenerator createAncientEventGenerator( // events again. Event X will not have any children, because the node is shunned after event X is // created. 
for (int i = 0; i < standardGenerator.getNumberOfSources(); i++) { - final EventSource source = standardGenerator.getSource(i); + final NodeId nodeId = standardGenerator.getAddressBook().getNodeId(i); + final EventSource source = standardGenerator.getSource(nodeId); // This is the last source. Force it to stop creating events (go to sleep) after the event X if (i == standardGenerator.getNumberOfSources() - 1) { @@ -635,7 +636,7 @@ public static void nodeUsesStaleOtherParents( final StandardGraphGenerator generator = new StandardGraphGenerator(0, eventSources); generator - .getSource(staleNodeProvider) + .getSourceByIndex(staleNodeProvider) .setRecentEventRetentionSize(5000) .setRequestedOtherParentAgeDistribution(integerPowerDistribution(0.002, 300)); @@ -673,7 +674,7 @@ public static void nodeProvidesStaleOtherParents( final StandardGraphGenerator generator = new StandardGraphGenerator(0, eventSources); generator - .getSource(staleNodeProvider) + .getSourceByIndex(staleNodeProvider) .setRecentEventRetentionSize(5000) .setProvidedOtherParentAgeDistribution(integerPowerDistribution(0.002, 300)); @@ -981,7 +982,6 @@ public static void areAllEventsReturned( final List nodeWeights = weightGenerator.getWeights(seed, numberOfNodes); final AtomicInteger index = new AtomicInteger(0); final AddressBook ab = new RandomAddressBookGenerator(random) - .setSequentialIds(true) .setSize(numberOfNodes) .setCustomWeightGenerator(id -> nodeWeights.get(index.getAndIncrement())) .setHashStrategy(RandomAddressBookGenerator.HashStrategy.FAKE_HASH) @@ -990,7 +990,7 @@ public static void areAllEventsReturned( // create an empty intake object final TestIntake intake = new TestIntake(ab); - final SimpleEventGenerator gen = new SimpleEventGenerator(numberOfNodes, random); + final SimpleEventGenerator gen = new SimpleEventGenerator(ab, random); final AtomicInteger numReturned = new AtomicInteger(); @@ -1089,7 +1089,6 @@ private static void doStaleEvent(final int numberOfNodes, final WeightGenerator final List nodeWeights = weightGenerator.getWeights(seedToUse, numberOfNodes); final AtomicInteger index = new AtomicInteger(0); final AddressBook ab = new RandomAddressBookGenerator(random) - .setSequentialIds(true) .setSize(numberOfNodes) .setCustomWeightGenerator(id -> nodeWeights.get(index.getAndIncrement())) .setHashStrategy(RandomAddressBookGenerator.HashStrategy.FAKE_HASH) @@ -1098,7 +1097,7 @@ private static void doStaleEvent(final int numberOfNodes, final WeightGenerator // create an empty consensus object final Consensus cons = buildSimpleConsensus(ab); - final SimpleEventGenerator gen = new SimpleEventGenerator(numberOfNodes, random); + final SimpleEventGenerator gen = new SimpleEventGenerator(ab, random); for (int i = 0; i < numEventsBeforeExclude; i++) { cons.addEvent(gen.nextEvent(), ab); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/EventEmitterTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/EventEmitterTests.java index 9e07cedc175b..bc33bc4ce71c 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/EventEmitterTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/EventEmitterTests.java @@ -67,6 +67,8 @@ public void validateReset(final EventEmitter emitter) { System.out.println("Validate Reset"); final int numberOfEvents = 
1000; + emitter.reset(); + final List events1 = emitter.emitEvents(numberOfEvents); assertEquals(numberOfEvents, events1.size()); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/GraphGeneratorTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/GraphGeneratorTests.java index dd90ef63d8fc..cc762fbd68c0 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/GraphGeneratorTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/GraphGeneratorTests.java @@ -31,6 +31,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.AddressBook; import com.swirlds.platform.test.event.DynamicValue; import com.swirlds.platform.test.event.DynamicValueGenerator; import com.swirlds.platform.test.event.IndexedEvent; @@ -78,6 +79,8 @@ public void validateReset(final GraphGenerator generator) { System.out.println("Validate Reset"); final int numberOfEvents = 1000; + generator.reset(); + final List events1 = generator.generateEvents(numberOfEvents); assertEquals(numberOfEvents, events1.size()); @@ -170,11 +173,11 @@ public void validateCleanCopyOfActiveGenerator(final GraphGenerator generator * any value between 0.24 and 0.26. */ protected void verifyExpectedOtherParentRatio( - final List events, final int nodeId, final double expectedRatio, final double tolerance) { + final List events, final NodeId nodeId, final double expectedRatio, final double tolerance) { int count = 0; for (final IndexedEvent event : events) { - if (Objects.equals(event.getOtherId(), new NodeId(nodeId))) { + if (Objects.equals(event.getOtherId(), nodeId)) { count++; } } @@ -199,11 +202,11 @@ protected void verifyExpectedOtherParentRatio( * any value between 0.24 and 0.26. 
*/ protected void verifyExpectedParentRatio( - final List events, final int nodeId, final double expectedRatio, final double tolerance) { + final List events, final NodeId nodeId, final double expectedRatio, final double tolerance) { int count = 0; for (final IndexedEvent event : events) { - if (event.getCreatorId().id() == nodeId) { + if (Objects.equals(event.getCreatorId(), nodeId)) { count++; } } @@ -221,38 +224,39 @@ public void validateParentDistribution(GraphGenerator generator) { System.out.println("Validate Parent Distribution"); assertEquals(4, generator.getNumberOfSources()); + final AddressBook addressBook = generator.getAddressBook(); // Test even weights generator = generator.cleanCopy(); - generator.getSource(0).setNewEventWeight(1.0); - generator.getSource(1).setNewEventWeight(1.0); - generator.getSource(2).setNewEventWeight(1.0); - generator.getSource(3).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(0)).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(1)).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(2)).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(3)).setNewEventWeight(1.0); List events = generator.generateEvents(1000); - verifyExpectedParentRatio(events, 0, 0.25, 0.05); - verifyExpectedParentRatio(events, 1, 0.25, 0.05); - verifyExpectedParentRatio(events, 2, 0.25, 0.05); - verifyExpectedParentRatio(events, 3, 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(0), 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(1), 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(2), 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(3), 0.25, 0.05); // Test un-even weights generator.reset(); - generator.getSource(0).setNewEventWeight(0.5); - generator.getSource(1).setNewEventWeight(1.0); - generator.getSource(2).setNewEventWeight(1.0); - generator.getSource(3).setNewEventWeight(2.0); + generator.getSource(addressBook.getNodeId(0)).setNewEventWeight(0.5); + generator.getSource(addressBook.getNodeId(1)).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(2)).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(3)).setNewEventWeight(2.0); events = generator.generateEvents(1000); - verifyExpectedParentRatio(events, 0, 0.5 / 4.5, 0.05); - verifyExpectedParentRatio(events, 1, 1.0 / 4.5, 0.05); - verifyExpectedParentRatio(events, 2, 1.0 / 4.5, 0.05); - verifyExpectedParentRatio(events, 3, 2.0 / 4.5, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(0), 0.5 / 4.5, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(1), 1.0 / 4.5, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(2), 1.0 / 4.5, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(3), 2.0 / 4.5, 0.05); // Test dynamic weights generator.reset(); - generator.getSource(0).setNewEventWeight(1.0); - generator.getSource(1).setNewEventWeight(1.0); - generator.getSource(2).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(0)).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(1)).setNewEventWeight(1.0); + generator.getSource(addressBook.getNodeId(2)).setNewEventWeight(1.0); final DynamicValue dynamicWeight = (Random random, long eventIndex, Double previousValue) -> { if (eventIndex < 1000) { return 0.0; @@ -262,25 +266,25 @@ public void validateParentDistribution(GraphGenerator generator) { return 2.0; } }; - 
generator.getSource(3).setNewEventWeight(dynamicWeight); + generator.getSource(addressBook.getNodeId(3)).setNewEventWeight(dynamicWeight); events = generator.generateEvents(1000); - verifyExpectedParentRatio(events, 0, 0.33, 0.05); - verifyExpectedParentRatio(events, 1, 0.33, 0.05); - verifyExpectedParentRatio(events, 2, 0.33, 0.05); - verifyExpectedParentRatio(events, 3, 0.0, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(0), 0.33, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(1), 0.33, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(2), 0.33, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(3), 0.0, 0.05); events = generator.generateEvents(1000); - verifyExpectedParentRatio(events, 0, 0.25, 0.05); - verifyExpectedParentRatio(events, 1, 0.25, 0.05); - verifyExpectedParentRatio(events, 2, 0.25, 0.05); - verifyExpectedParentRatio(events, 3, 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(0), 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(1), 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(2), 0.25, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(3), 0.25, 0.05); events = generator.generateEvents(1000); - verifyExpectedParentRatio(events, 0, 0.2, 0.05); - verifyExpectedParentRatio(events, 1, 0.2, 0.05); - verifyExpectedParentRatio(events, 2, 0.2, 0.05); - verifyExpectedParentRatio(events, 3, 0.4, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(0), 0.2, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(1), 0.2, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(2), 0.2, 0.05); + verifyExpectedParentRatio(events, addressBook.getNodeId(3), 0.4, 0.05); } /** @@ -303,6 +307,7 @@ public void validateOtherParentDistribution(GraphGenerator generator) { // Even distribution generator = generator.cleanCopy(); + final AddressBook addressBook = generator.getAddressBook(); StandardGraphGenerator baseGenerator = getBaseGenerator(generator); baseGenerator.setOtherParentAffinity(asList( asList(0.0, 1.0, 1.0, 1.0), @@ -310,10 +315,10 @@ public void validateOtherParentDistribution(GraphGenerator generator) { asList(1.0, 1.0, 0.0, 1.0), asList(1.0, 1.0, 1.0, 0.0))); List events = generator.generateEvents(1000); - verifyExpectedOtherParentRatio(events, 0, 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 1, 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 2, 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 3, 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(0), 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(1), 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(2), 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(3), 0.25, 0.05); // Node 0 is never used as the other parent generator.reset(); @@ -324,10 +329,10 @@ public void validateOtherParentDistribution(GraphGenerator generator) { asList(0.0, 1.0, 0.0, 1.0), asList(0.0, 1.0, 1.0, 0.0))); events = generator.generateEvents(1000); - verifyExpectedOtherParentRatio(events, 0, 0.0, 0.0); - verifyExpectedOtherParentRatio(events, 1, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 2, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 3, 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(0), 0.0, 0.0); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(1), 0.333, 0.05); + 
verifyExpectedOtherParentRatio(events, addressBook.getNodeId(2), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(3), 0.333, 0.05); // Node 3 is never used as the other parent generator.reset(); @@ -338,10 +343,10 @@ public void validateOtherParentDistribution(GraphGenerator generator) { asList(1.0, 1.0, 0.0, 0.0), asList(1.0, 1.0, 1.0, 0.0))); events = generator.generateEvents(1000); - verifyExpectedOtherParentRatio(events, 0, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 1, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 2, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 3, 0.0, 0.0); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(0), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(1), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(2), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(3), 0.0, 0.0); // Node 0 uses node 1 as the other parent twice as often as it uses either 2 or 3 generator.reset(); @@ -352,10 +357,10 @@ public void validateOtherParentDistribution(GraphGenerator generator) { asList(1.0, 1.0, 0.0, 1.0), asList(1.0, 1.0, 1.0, 0.0))); events = generator.generateEvents(1000); - verifyExpectedOtherParentRatio(events, 0, 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 1, 0.5 * 0.333 + 0.25 * 0.5, 0.05); - verifyExpectedOtherParentRatio(events, 2, 0.5 * 0.333 + 0.25 * 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 3, 0.5 * 0.333 + 0.25 * 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(0), 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(1), 0.5 * 0.333 + 0.25 * 0.5, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(2), 0.5 * 0.333 + 0.25 * 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(3), 0.5 * 0.333 + 0.25 * 0.25, 0.05); // Dynamic other parent affinity generator.reset(); @@ -389,22 +394,22 @@ public void validateOtherParentDistribution(GraphGenerator generator) { baseGenerator.setOtherParentAffinity(affinityGenerator); events = generator.generateEvents(1000); - verifyExpectedOtherParentRatio(events, 0, 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 1, 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 2, 0.25, 0.05); - verifyExpectedOtherParentRatio(events, 3, 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(0), 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(1), 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(2), 0.25, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(3), 0.25, 0.05); events = generator.generateEvents(1000); - verifyExpectedOtherParentRatio(events, 0, 0.0, 0.0); - verifyExpectedOtherParentRatio(events, 1, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 2, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 3, 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(0), 0.0, 0.0); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(1), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(2), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(3), 0.333, 0.05); events = generator.generateEvents(1000); - verifyExpectedOtherParentRatio(events, 0, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 1, 0.333, 0.05); - verifyExpectedOtherParentRatio(events, 2, 0.333, 0.05); - 
verifyExpectedOtherParentRatio(events, 3, 0.0, 0.0); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(0), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(1), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(2), 0.333, 0.05); + verifyExpectedOtherParentRatio(events, addressBook.getNodeId(3), 0.0, 0.0); } /** @@ -454,7 +459,7 @@ public void validateMaxGeneration(final GraphGenerator generator) { // validate only the last event to keep the validation simple assertEquals( lastEvent.getGeneration(), - generator.getMaxGeneration(lastEvent.getCreatorId().id()), + generator.getMaxGeneration(lastEvent.getCreatorId()), "last event should have the max generation"); generator.reset(); } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/IntakeAndConsensusTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/IntakeAndConsensusTests.java index f8eebdb1a67f..0462f97a4cba 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/IntakeAndConsensusTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/consensus/IntakeAndConsensusTests.java @@ -19,6 +19,7 @@ import static com.swirlds.test.framework.TestQualifierTags.TIME_CONSUMING; import com.swirlds.common.config.ConsensusConfig; +import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.EventConstants; @@ -30,8 +31,11 @@ import com.swirlds.platform.test.event.source.StandardEventSource; import com.swirlds.platform.test.graph.OtherParentMatrixFactory; import com.swirlds.test.framework.config.TestConfigBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.LinkedList; import java.util.List; +import java.util.Objects; import java.util.stream.Stream; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; @@ -200,7 +204,9 @@ public int getNumberOfSources() { } @Override - public EventSource getSource(final int nodeID) { + @Nullable + public EventSource getSource(@NonNull final NodeId nodeID) { + Objects.requireNonNull(nodeID); return generator.getSource(nodeID); } @@ -230,7 +236,8 @@ public AddressBook getAddressBook() { } @Override - public long getMaxGeneration(final long creatorId) { + public long getMaxGeneration(@NonNull final NodeId creatorId) { + Objects.requireNonNull(creatorId); return generator.getMaxGeneration(creatorId); } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/creation/EventCreationSimulationTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/creation/EventCreationSimulationTest.java index caeafc3ccede..3e71b06a9359 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/creation/EventCreationSimulationTest.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/creation/EventCreationSimulationTest.java @@ -106,7 +106,6 @@ void simulateEventCreation(final EventCreationSimulationParams params) { final AddressBook addressBook = new 
RandomAddressBookGenerator(random) .setNodeIds(nodeConfigs.keySet()) .setHashStrategy(RandomAddressBookGenerator.HashStrategy.FAKE_HASH) - .setSequentialIds(false) .build(); final FakeTime time = new FakeTime(); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/linking/OrphanBufferTester.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/linking/OrphanBufferTester.java index fb88b118470f..adea6754ff2d 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/linking/OrphanBufferTester.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/linking/OrphanBufferTester.java @@ -23,11 +23,11 @@ import com.swirlds.common.crypto.Hash; import com.swirlds.platform.consensus.GraphGenerations; +import com.swirlds.platform.event.EventDescriptor; import com.swirlds.platform.event.GossipEvent; import com.swirlds.platform.event.linking.EventLinker; import com.swirlds.platform.event.linking.OrphanBufferingLinker; import com.swirlds.platform.event.linking.ParentFinder; -import com.swirlds.platform.gossip.chatter.protocol.messages.EventDescriptor; import com.swirlds.platform.gossip.shadowgraph.Generations; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.state.signed.SignedState; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java index bcf4f5ed6df0..f61ab7ab916b 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java @@ -56,6 +56,7 @@ import com.swirlds.platform.test.event.generator.StandardGraphGenerator; import com.swirlds.platform.test.event.source.StandardEventSource; import com.swirlds.test.framework.config.TestConfigBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -69,7 +70,6 @@ import java.util.Random; import java.util.Set; import java.util.stream.Stream; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java index d48e31489f6e..e079f360f821 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java @@ -42,6 +42,7 @@ import 
com.swirlds.platform.event.preconsensus.PreconsensusEventFile; import com.swirlds.platform.event.preconsensus.PreconsensusEventFileManager; import com.swirlds.test.framework.config.TestConfigBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.FileOutputStream; import java.io.IOException; import java.nio.file.Files; @@ -56,7 +57,6 @@ import java.util.Random; import java.util.Set; import java.util.stream.Stream; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayPipelineTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayPipelineTests.java index dc5c710d57a5..539c2d3fd9fa 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayPipelineTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayPipelineTests.java @@ -43,6 +43,7 @@ import com.swirlds.platform.test.event.generator.StandardGraphGenerator; import com.swirlds.platform.test.event.source.StandardEventSource; import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.io.UncheckedIOException; import java.time.Duration; @@ -53,7 +54,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/ChildlessEventTrackerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/ChildlessEventTrackerTests.java new file mode 100644 index 000000000000..81e3ba4c12aa --- /dev/null +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/ChildlessEventTrackerTests.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.swirlds.platform.test.event.tipset;
+
+import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed;
+import static com.swirlds.common.test.RandomUtils.randomHash;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.swirlds.common.system.NodeId;
+import com.swirlds.platform.event.EventDescriptor;
+import com.swirlds.platform.event.tipset.ChildlessEventTracker;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+@DisplayName("ChildlessEventTracker Tests")
+class ChildlessEventTrackerTests {
+
+    @Test
+    @DisplayName("Basic Behavior Test")
+    void basicBehaviorTest() {
+        final Random random = getRandomPrintSeed();
+
+        final ChildlessEventTracker tracker = new ChildlessEventTracker();
+
+        // Add some events with no parents
+        final List<EventDescriptor> batch1 = new ArrayList<>();
+        for (int i = 0; i < 10; i++) {
+            final EventDescriptor descriptor = new EventDescriptor(randomHash(random), new NodeId(i), 0);
+            tracker.addEvent(descriptor, List.of());
+            batch1.add(descriptor);
+        }
+
+        assertEquals(new HashSet<>(batch1), new HashSet<>(tracker.getChildlessEvents()));
+
+        // Increase generation. All new events will either have parents with odd node
+        // IDs or parents that haven't been registered yet. When this is completed,
+        // the new events should be tracked, and all registered parents should not be.
+
+        final List<EventDescriptor> batch2 = new ArrayList<>();
+        for (int i = 0; i < 10; i++) {
+            final NodeId nonExistentParentId = new NodeId(i + 100);
+            final EventDescriptor nonExistentParent = new EventDescriptor(randomHash(random), nonExistentParentId, 0);
+            final int oddParentId = (i * 2 + 1) % 10;
+            final EventDescriptor oddParent = batch1.get(oddParentId);
+
+            final EventDescriptor descriptor = new EventDescriptor(randomHash(random), new NodeId(i), 1);
+            tracker.addEvent(descriptor, List.of(nonExistentParent, oddParent));
+            batch2.add(descriptor);
+        }
+
+        final Set<EventDescriptor> expectedEvents = new HashSet<>(batch2);
+        for (final EventDescriptor descriptor : batch1) {
+            if (descriptor.getCreator().id() % 2 == 0) {
+                expectedEvents.add(descriptor);
+            }
+        }
+
+        assertEquals(expectedEvents, new HashSet<>(tracker.getChildlessEvents()));
+
+        // Increase the minimum generation non-ancient to 1; all events from batch1 should be removed
+        tracker.pruneOldEvents(1);
+
+        assertEquals(new HashSet<>(batch2), new HashSet<>(tracker.getChildlessEvents()));
+    }
+}
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetAdvancementWeightTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetAdvancementWeightTest.java
new file mode 100644
index 000000000000..f390572ff3a4
--- /dev/null
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetAdvancementWeightTest.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.test.event.tipset; + +import static com.swirlds.platform.event.tipset.TipsetAdvancementWeight.ZERO_ADVANCEMENT_WEIGHT; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.swirlds.platform.event.tipset.TipsetAdvancementWeight; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +@DisplayName("TipsetAdvancementWeight Tests") +class TipsetAdvancementWeightTest { + + @Test + @DisplayName("plus() Test") + void plusTest() { + assertEquals(ZERO_ADVANCEMENT_WEIGHT, ZERO_ADVANCEMENT_WEIGHT.plus(ZERO_ADVANCEMENT_WEIGHT)); + assertEquals( + TipsetAdvancementWeight.of(1234, 4321), + TipsetAdvancementWeight.of(1234, 4321).plus(ZERO_ADVANCEMENT_WEIGHT)); + assertEquals( + TipsetAdvancementWeight.of(579, 975), + TipsetAdvancementWeight.of(123, 321).plus(TipsetAdvancementWeight.of(456, 654))); + } + + @Test + @DisplayName("minus() Test") + void minusTest() { + assertEquals(ZERO_ADVANCEMENT_WEIGHT, ZERO_ADVANCEMENT_WEIGHT.minus(ZERO_ADVANCEMENT_WEIGHT)); + assertEquals( + TipsetAdvancementWeight.of(1234, 4321), + TipsetAdvancementWeight.of(1234, 4321).minus(ZERO_ADVANCEMENT_WEIGHT)); + assertEquals( + TipsetAdvancementWeight.of(579, 975), + TipsetAdvancementWeight.of(123, 321).minus(TipsetAdvancementWeight.of(-456, -654))); + } + + @Test + @DisplayName("isGreaterThan() Test") + void isGreaterThanTest() { + assertFalse(ZERO_ADVANCEMENT_WEIGHT.isGreaterThan(ZERO_ADVANCEMENT_WEIGHT)); + assertFalse(ZERO_ADVANCEMENT_WEIGHT.isGreaterThan(TipsetAdvancementWeight.of(0, 1))); + assertFalse(ZERO_ADVANCEMENT_WEIGHT.isGreaterThan(TipsetAdvancementWeight.of(1, 0))); + assertFalse(ZERO_ADVANCEMENT_WEIGHT.isGreaterThan(TipsetAdvancementWeight.of(1, 1))); + assertTrue(TipsetAdvancementWeight.of(1, 1).isGreaterThan(ZERO_ADVANCEMENT_WEIGHT)); + assertTrue(TipsetAdvancementWeight.of(1, 0).isGreaterThan(ZERO_ADVANCEMENT_WEIGHT)); + assertTrue(TipsetAdvancementWeight.of(0, 1).isGreaterThan(ZERO_ADVANCEMENT_WEIGHT)); + assertTrue(TipsetAdvancementWeight.of(1, 1).isGreaterThan(TipsetAdvancementWeight.of(0, 1))); + assertTrue(TipsetAdvancementWeight.of(1, 1).isGreaterThan(TipsetAdvancementWeight.of(1, 0))); + assertTrue(TipsetAdvancementWeight.of(1, 1).isGreaterThan(TipsetAdvancementWeight.of(0, 0))); + assertFalse(TipsetAdvancementWeight.of(0, 1).isGreaterThan(TipsetAdvancementWeight.of(1, 1))); + assertFalse(TipsetAdvancementWeight.of(1, 0).isGreaterThan(TipsetAdvancementWeight.of(1, 1))); + assertFalse(TipsetAdvancementWeight.of(0, 0).isGreaterThan(TipsetAdvancementWeight.of(1, 1))); + } + + @Test + @DisplayName("isNonzero() Test") + void isNonzeroTest() { + assertFalse(ZERO_ADVANCEMENT_WEIGHT.isNonZero()); + assertTrue(TipsetAdvancementWeight.of(1, 0).isNonZero()); + assertTrue(TipsetAdvancementWeight.of(0, 1).isNonZero()); + assertTrue(TipsetAdvancementWeight.of(1, 1).isNonZero()); + } +} diff --git 
a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetEventCreationRulesTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetEventCreationRulesTests.java new file mode 100644 index 000000000000..edbb4c682680 --- /dev/null +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetEventCreationRulesTests.java @@ -0,0 +1,294 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.test.event.tipset; + +import static com.swirlds.common.system.EventCreationRuleResponse.PASS; +import static com.swirlds.common.system.status.PlatformStatus.ACTIVE; +import static com.swirlds.common.system.status.PlatformStatus.CHECKING; +import static com.swirlds.common.system.status.PlatformStatus.FREEZING; +import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.system.EventCreationRuleResponse; +import com.swirlds.common.system.status.PlatformStatus; +import com.swirlds.common.test.fixtures.FakeTime; +import com.swirlds.config.api.Configuration; +import com.swirlds.platform.StartUpEventFrozenManager; +import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.event.tipset.TipsetEventCreator; +import com.swirlds.platform.event.tipset.rules.AggregateTipsetEventCreationRules; +import com.swirlds.platform.event.tipset.rules.TipsetEventCreationRule; +import com.swirlds.platform.event.tipset.rules.TipsetMaximumRateRule; +import com.swirlds.platform.event.tipset.rules.TipsetPlatformStatusRule; +import com.swirlds.platform.eventhandling.EventTransactionPool; +import com.swirlds.test.framework.config.TestConfigBuilder; +import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import java.time.Duration; +import java.util.Random; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +@DisplayName("Tipset Event Creation Rules Tests") +class TipsetEventCreationRulesTests { + + @Test + @DisplayName("Empty Aggregate Test") + void emptyAggregateTest() { + final TipsetEventCreationRule rule = AggregateTipsetEventCreationRules.of(); + assertTrue(rule.isEventCreationPermitted()); + + // should not throw + rule.eventWasCreated(); + } + + @Test + @DisplayName("Aggregate Test") + void aggregateTest() { + final 
TipsetEventCreationRule rule1 = mock(TipsetEventCreationRule.class); + when(rule1.isEventCreationPermitted()).thenAnswer(invocation -> true); + final AtomicInteger rule1Count = new AtomicInteger(0); + doAnswer(invocation -> { + rule1Count.incrementAndGet(); + return null; + }) + .when(rule1) + .eventWasCreated(); + + final TipsetEventCreationRule rule2 = mock(TipsetEventCreationRule.class); + when(rule2.isEventCreationPermitted()).thenAnswer(invocation -> true); + final AtomicInteger rule2Count = new AtomicInteger(0); + doAnswer(invocation -> { + rule2Count.incrementAndGet(); + return null; + }) + .when(rule2) + .eventWasCreated(); + + final TipsetEventCreationRule rule3 = mock(TipsetEventCreationRule.class); + when(rule3.isEventCreationPermitted()).thenAnswer(invocation -> true); + final AtomicInteger rule3Count = new AtomicInteger(0); + doAnswer(invocation -> { + rule3Count.incrementAndGet(); + return null; + }) + .when(rule3) + .eventWasCreated(); + + final TipsetEventCreationRule rule4 = mock(TipsetEventCreationRule.class); + when(rule4.isEventCreationPermitted()).thenAnswer(invocation -> true); + final AtomicInteger rule4Count = new AtomicInteger(0); + doAnswer(invocation -> { + rule4Count.incrementAndGet(); + return null; + }) + .when(rule4) + .eventWasCreated(); + + final TipsetEventCreationRule aggregateRule = AggregateTipsetEventCreationRules.of(rule1, rule2, rule3, rule4); + + assertTrue(aggregateRule.isEventCreationPermitted()); + + when(rule3.isEventCreationPermitted()).thenAnswer(invocation -> false); + assertFalse(aggregateRule.isEventCreationPermitted()); + + when(rule2.isEventCreationPermitted()).thenAnswer(invocation -> false); + assertFalse(aggregateRule.isEventCreationPermitted()); + + when(rule1.isEventCreationPermitted()).thenAnswer(invocation -> false); + assertFalse(aggregateRule.isEventCreationPermitted()); + + when(rule4.isEventCreationPermitted()).thenAnswer(invocation -> false); + assertFalse(aggregateRule.isEventCreationPermitted()); + + aggregateRule.eventWasCreated(); + assertEquals(1, rule1Count.get()); + assertEquals(1, rule2Count.get()); + assertEquals(1, rule3Count.get()); + assertEquals(1, rule4Count.get()); + } + + @Test + @DisplayName("Blocked by StartUpFrozenManager Test") + void blockedByStartUpFrozenManagerTest() { + final EventTransactionPool transactionPool = mock(EventTransactionPool.class); + final Supplier platformStatusSupplier = () -> ACTIVE; + + final AtomicReference shouldCreateEvent = + new AtomicReference<>(EventCreationRuleResponse.DONT_CREATE); + final StartUpEventFrozenManager startUpEventFrozenManager = mock(StartUpEventFrozenManager.class); + when(startUpEventFrozenManager.shouldCreateEvent()).thenAnswer(invocation -> shouldCreateEvent.get()); + + final AtomicInteger eventCreationCount = new AtomicInteger(0); + final TipsetEventCreator baseEventCreator = mock(TipsetEventCreator.class); + when(baseEventCreator.maybeCreateEvent()).thenAnswer(invocation -> { + eventCreationCount.incrementAndGet(); + return null; + }); + + final TipsetPlatformStatusRule rule = + new TipsetPlatformStatusRule(platformStatusSupplier, transactionPool, startUpEventFrozenManager); + + assertFalse(rule.isEventCreationPermitted()); + + shouldCreateEvent.set(PASS); + + assertTrue(rule.isEventCreationPermitted()); + } + + @Test + @DisplayName("Blocked by Freeze Test") + void blockedByFreeze() { + final Supplier platformStatusSupplier = () -> FREEZING; + final StartUpEventFrozenManager startUpEventFrozenManager = mock(StartUpEventFrozenManager.class); + 
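+ // The start-up freeze is not in effect for this test; the frozen manager is stubbed to always pass.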
when(startUpEventFrozenManager.shouldCreateEvent()).thenAnswer(invocation -> PASS); + + final AtomicInteger numSignatureTransactions = new AtomicInteger(0); + final EventTransactionPool transactionPool = mock(EventTransactionPool.class); + when(transactionPool.numSignatureTransEvent()).thenAnswer(invocation -> numSignatureTransactions.get()); + + final AtomicInteger eventCreationCount = new AtomicInteger(0); + final TipsetEventCreator baseEventCreator = mock(TipsetEventCreator.class); + when(baseEventCreator.maybeCreateEvent()).thenAnswer(invocation -> { + eventCreationCount.incrementAndGet(); + return null; + }); + + final TipsetEventCreationRule rule = + new TipsetPlatformStatusRule(platformStatusSupplier, transactionPool, startUpEventFrozenManager); + + assertFalse(rule.isEventCreationPermitted()); + numSignatureTransactions.set(1); + assertTrue(rule.isEventCreationPermitted()); + } + + @Test + @DisplayName("Blocked by Status Test") + void blockedByStatus() { + final EventTransactionPool transactionPool = mock(EventTransactionPool.class); + final StartUpEventFrozenManager startUpEventFrozenManager = mock(StartUpEventFrozenManager.class); + when(startUpEventFrozenManager.shouldCreateEvent()).thenAnswer(invocation -> PASS); + + final AtomicReference status = new AtomicReference<>(); + + final AtomicInteger eventCreationCount = new AtomicInteger(0); + final TipsetEventCreator baseEventCreator = mock(TipsetEventCreator.class); + when(baseEventCreator.maybeCreateEvent()).thenAnswer(invocation -> { + eventCreationCount.incrementAndGet(); + return null; + }); + + final TipsetEventCreationRule rule = + new TipsetPlatformStatusRule(status::get, transactionPool, startUpEventFrozenManager); + + for (final PlatformStatus platformStatus : PlatformStatus.values()) { + if (platformStatus == FREEZING) { + // this is checked in another test, don't bother checking + continue; + } + + status.set(platformStatus); + + if (platformStatus == ACTIVE || platformStatus == CHECKING) { + assertTrue(rule.isEventCreationPermitted()); + } else { + assertFalse(rule.isEventCreationPermitted()); + } + } + } + + @Test + @DisplayName("No Rate Limit Test") + void noRateLimitTest() { + final PlatformContext platformContext = + TestPlatformContextBuilder.create().build(); + + final Time time = new FakeTime(); + + final StartUpEventFrozenManager startUpEventFrozenManager = mock(StartUpEventFrozenManager.class); + when(startUpEventFrozenManager.shouldCreateEvent()).thenAnswer(invocation -> PASS); + + final AtomicInteger eventCreationCount = new AtomicInteger(0); + final TipsetEventCreator baseEventCreator = mock(TipsetEventCreator.class); + when(baseEventCreator.maybeCreateEvent()).thenAnswer(invocation -> { + eventCreationCount.incrementAndGet(); + return mock(GossipEvent.class); + }); + + final TipsetEventCreationRule rule = new TipsetMaximumRateRule(platformContext, time); + + // Ask for a bunch of events to be created without advancing the time. 
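+ // The default test configuration does not limit the creation rate, so creation should remain
+ // permitted on every attempt.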
+ for (int i = 0; i < 100; i++) { + assertTrue(rule.isEventCreationPermitted()); + } + } + + @Test + @DisplayName("Rate Limit Test") + void rateLimitTest() { + final Random random = getRandomPrintSeed(); + + final int maxRate = 100; + final Duration period = Duration.ofSeconds(1).dividedBy(maxRate); + + final Configuration configuration = new TestConfigBuilder() + .withValue("event.creation.maxCreationRate", maxRate) + .getOrCreateConfig(); + + final PlatformContext platformContext = TestPlatformContextBuilder.create() + .withConfiguration(configuration) + .build(); + + final FakeTime time = new FakeTime(); + + final StartUpEventFrozenManager startUpEventFrozenManager = mock(StartUpEventFrozenManager.class); + when(startUpEventFrozenManager.shouldCreateEvent()).thenAnswer(invocation -> PASS); + + final TipsetEventCreationRule rule = new TipsetMaximumRateRule(platformContext, time); + + int millisSinceLastEvent = (int) period.toMillis(); + for (int i = 0; i < 100; i++) { + final boolean tickForwards = random.nextBoolean(); + if (tickForwards) { + final int millisToTick = random.nextInt(5); + time.tick(Duration.ofMillis(millisToTick)); + millisSinceLastEvent += millisToTick; + } + + if (millisSinceLastEvent >= period.toMillis()) { + assertTrue(rule.isEventCreationPermitted()); + + // Sometimes create an event. Sometimes don't. + if (random.nextBoolean()) { + rule.eventWasCreated(); + millisSinceLastEvent = 0; + } + } else { + assertFalse(rule.isEventCreationPermitted()); + } + } + } +} diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetEventCreatorImplTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetEventCreatorImplTests.java new file mode 100644 index 000000000000..f8e6f1496f0e --- /dev/null +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetEventCreatorImplTests.java @@ -0,0 +1,636 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.swirlds.platform.test.event.tipset; + +import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed; +import static com.swirlds.common.test.RandomUtils.randomSignature; +import static com.swirlds.common.utility.CompareTo.isGreaterThanOrEqualTo; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.swirlds.base.time.Time; +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.crypto.Hash; +import com.swirlds.common.stream.Signer; +import com.swirlds.common.system.BasicSoftwareVersion; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.SoftwareVersion; +import com.swirlds.common.system.address.Address; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.common.system.transaction.internal.ConsensusTransactionImpl; +import com.swirlds.common.system.transaction.internal.SwirldTransaction; +import com.swirlds.common.test.RandomAddressBookGenerator; +import com.swirlds.common.test.fixtures.FakeTime; +import com.swirlds.platform.components.transaction.TransactionSupplier; +import com.swirlds.platform.event.EventDescriptor; +import com.swirlds.platform.event.GossipEvent; +import com.swirlds.platform.event.tipset.ChildlessEventTracker; +import com.swirlds.platform.event.tipset.TipsetEventCreator; +import com.swirlds.platform.event.tipset.TipsetEventCreatorImpl; +import com.swirlds.platform.event.tipset.TipsetTracker; +import com.swirlds.platform.event.tipset.TipsetUtils; +import com.swirlds.platform.event.tipset.TipsetWeightCalculator; +import com.swirlds.platform.internal.EventImpl; +import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.atomic.AtomicReference; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +@DisplayName("TipsetEventCreatorImpl Tests") +class TipsetEventCreatorImplTests { + + /** + * @param nodeId the node ID of the simulated node + * @param tipsetTracker tracks tipsets of events + * @param tipsetEventCreator the event creator for the simulated node + * @param tipsetWeightCalculator used to sanity check event creation logic + */ + private record SimulatedNode( + @NonNull NodeId nodeId, + @NonNull TipsetTracker tipsetTracker, + @NonNull TipsetEventCreator tipsetEventCreator, + @NonNull TipsetWeightCalculator tipsetWeightCalculator) {} + + /** + * Build an event creator for a node. 
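+ *
+ * @param random the source of randomness used for signing and event creation
+ * @param time the time source used by the event creator
+ * @param addressBook the address book of the simulated network
+ * @param nodeId the ID of the node the event creator belongs to
+ * @param transactionSupplier supplies the transactions placed into new events
+ * @return the new event creator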
+ */ + @NonNull + private TipsetEventCreator buildEventCreator( + @NonNull final Random random, + @NonNull final Time time, + @NonNull final AddressBook addressBook, + @NonNull final NodeId nodeId, + @NonNull final TransactionSupplier transactionSupplier) { + + final PlatformContext platformContext = + TestPlatformContextBuilder.create().build(); + + final Signer signer = mock(Signer.class); + when(signer.sign(any())).thenAnswer(invocation -> randomSignature(random)); + + final SoftwareVersion softwareVersion = new BasicSoftwareVersion(1); + + return new TipsetEventCreatorImpl( + platformContext, time, random, signer, addressBook, nodeId, softwareVersion, transactionSupplier); + } + + /** + * Build an event creator for each node in the address book. + */ + @NonNull + private Map buildSimulatedNodes( + @NonNull final Random random, + @NonNull final Time time, + @NonNull final AddressBook addressBook, + @NonNull final TransactionSupplier transactionSupplier) { + + final Map eventCreators = new HashMap<>(); + final PlatformContext platformContext = + TestPlatformContextBuilder.create().build(); + + for (final Address address : addressBook) { + + final TipsetEventCreator eventCreator = + buildEventCreator(random, time, addressBook, address.getNodeId(), transactionSupplier); + + final TipsetTracker tipsetTracker = new TipsetTracker(addressBook); + + final ChildlessEventTracker childlessEventTracker = new ChildlessEventTracker(); + final TipsetWeightCalculator tipsetWeightCalculator = new TipsetWeightCalculator( + platformContext, addressBook, address.getNodeId(), tipsetTracker, childlessEventTracker); + + eventCreators.put( + address.getNodeId(), + new SimulatedNode(address.getNodeId(), tipsetTracker, eventCreator, tipsetWeightCalculator)); + } + + return eventCreators; + } + + private void validateNewEvent( + @NonNull final Map events, + @NonNull final GossipEvent newEvent, + @NonNull final ConsensusTransactionImpl[] expectedTransactions, + @NonNull final SimulatedNode simulatedNode, + final boolean slowNode) { + + final EventImpl selfParent = events.get(newEvent.getHashedData().getSelfParentHash()); + final long selfParentGeneration = selfParent == null ? -1 : selfParent.getGeneration(); + final EventImpl otherParent = events.get(newEvent.getHashedData().getOtherParentHash()); + final long otherParentGeneration = otherParent == null ? -1 : otherParent.getGeneration(); + + if (selfParent == null) { + // The only legal time to have a null self parent is genesis. + for (final EventImpl event : events.values()) { + if (event.getHashedData() + .getHash() + .equals(newEvent.getHashedData().getHash())) { + // comparing to self + continue; + } + assertNotEquals(event.getCreatorId(), newEvent.getHashedData().getCreatorId()); + } + } + + if (otherParent == null) { + if (slowNode) { + // During the slow node test, we intentionally don't distribute an event that ends up in the + // events map. So it's possible for this map to contain two events at this point in time. + assertTrue(events.size() == 1 || events.size() == 2); + } else { + // The only legal time to have no other-parent is at genesis before other events are received. 
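+ // (The single event in the map is the new event itself, added when it was linked.)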
+ assertEquals(1, events.size()); + } + assertTrue(events.containsKey(newEvent.getHashedData().getHash())); + } + + // Generation should be max of parents plus one + final long expectedGeneration = Math.max(selfParentGeneration, otherParentGeneration) + 1; + assertEquals(expectedGeneration, newEvent.getHashedData().getGeneration()); + + // Timestamp must always increase by 1 nanosecond, and there must always be a unique timestamp + // with nanosecond precision for transaction. + if (selfParent != null) { + final int minimumIncrement = Math.max(1, selfParent.getHashedData().getTransactions().length); + final Instant minimumTimestamp = + selfParent.getHashedData().getTimeCreated().plus(Duration.ofNanos(minimumIncrement)); + assertTrue(isGreaterThanOrEqualTo(newEvent.getHashedData().getTimeCreated(), minimumTimestamp)); + } + + // Validate tipset constraints. + final EventDescriptor descriptor = newEvent.getDescriptor(); + if (selfParent != null) { + // Except for a genesis event, all other new events must have a positive advancement score. + assertTrue(simulatedNode + .tipsetWeightCalculator + .addEventAndGetAdvancementWeight(descriptor) + .isNonZero()); + } else { + simulatedNode.tipsetWeightCalculator.addEventAndGetAdvancementWeight(descriptor); + } + + // We should see the expected transactions + assertArrayEquals(expectedTransactions, newEvent.getHashedData().getTransactions()); + } + + /** + * Link the event into its parents and distribute to all nodes in the network. + */ + private void linkAndDistributeEvent( + @NonNull final Map eventCreators, + @NonNull final Map events, + @NonNull final GossipEvent event) { + + distributeEvent(eventCreators, linkEvent(eventCreators, events, event)); + } + + /** + * Link an event to its parents. + */ + @NonNull + private EventImpl linkEvent( + @NonNull final Map eventCreators, + @NonNull final Map events, + @NonNull final GossipEvent event) { + + eventCreators + .get(event.getHashedData().getCreatorId()) + .tipsetTracker + .addEvent(event.getDescriptor(), TipsetUtils.getParentDescriptors(event)); + + final EventImpl selfParent = events.get(event.getHashedData().getSelfParentHash()); + final EventImpl otherParent = events.get(event.getHashedData().getOtherParentHash()); + + final EventImpl eventImpl = + new EventImpl(event.getHashedData(), event.getUnhashedData(), selfParent, otherParent); + events.put(event.getHashedData().getHash(), eventImpl); + + return eventImpl; + } + + /** + * Distribute an event to all nodes in the network. + */ + private void distributeEvent( + @NonNull final Map eventCreators, @NonNull final EventImpl eventImpl) { + + for (final SimulatedNode eventCreator : eventCreators.values()) { + eventCreator.tipsetEventCreator.registerEvent(eventImpl); + eventCreator.tipsetTracker.addEvent( + eventImpl.getBaseEvent().getDescriptor(), TipsetUtils.getParentDescriptors(eventImpl)); + } + } + + /** + * Generate a small number of random transactions. + */ + @NonNull + private ConsensusTransactionImpl[] generateRandomTransactions(@NonNull final Random random) { + final int transactionCount = random.nextInt(0, 10); + final ConsensusTransactionImpl[] transactions = new ConsensusTransactionImpl[transactionCount]; + + for (int i = 0; i < transactionCount; i++) { + final byte[] bytes = new byte[32]; + random.nextBytes(bytes); + final ConsensusTransactionImpl transaction = new SwirldTransaction(bytes); + transactions[i] = transaction; + } + + return transactions; + } + + /** + * Nodes take turns creating events in a round-robin fashion. 
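+ * Every node should be able to create an event on each turn, and every new event is validated
+ * against its parents, its transactions, and (when the clock is advancing) the fake time.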
+ */ + @ParameterizedTest + @ValueSource(booleans = {false, true}) + @DisplayName("Round Robin Test") + void roundRobinTest(final boolean advancingClock) { + final Random random = getRandomPrintSeed(); + + final int networkSize = 10; + + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(networkSize).build(); + + final FakeTime time = new FakeTime(); + + final AtomicReference transactionSupplier = new AtomicReference<>(); + + final Map nodes = + buildSimulatedNodes(random, time, addressBook, transactionSupplier::get); + + final Map events = new HashMap<>(); + + for (int eventIndex = 0; eventIndex < 100; eventIndex++) { + for (final Address address : addressBook) { + if (advancingClock) { + time.tick(Duration.ofMillis(10)); + } + + transactionSupplier.set(generateRandomTransactions(random)); + + final NodeId nodeId = address.getNodeId(); + final TipsetEventCreator eventCreator = nodes.get(nodeId).tipsetEventCreator; + + final GossipEvent event = eventCreator.maybeCreateEvent(); + + // In this test, it should be impossible for a node to be unable to create an event. + assertNotNull(event); + + linkAndDistributeEvent(nodes, events, event); + + if (advancingClock) { + assertEquals(event.getHashedData().getTimeCreated(), time.now()); + } + + validateNewEvent(events, event, transactionSupplier.get(), nodes.get(nodeId), false); + } + } + } + + /** + * Each cycle, randomize the order in which nodes are asked to create events. + */ + @ParameterizedTest + @ValueSource(booleans = {false, true}) + @DisplayName("Random Order Test") + void randomOrderTest(final boolean advancingClock) { + final Random random = getRandomPrintSeed(); + + final int networkSize = 10; + + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(networkSize).build(); + + final FakeTime time = new FakeTime(); + + final AtomicReference transactionSupplier = new AtomicReference<>(); + + final Map nodes = + buildSimulatedNodes(random, time, addressBook, transactionSupplier::get); + + final Map events = new HashMap<>(); + + for (int eventIndex = 0; eventIndex < 100; eventIndex++) { + + final List
addresses = new ArrayList<>(); + addressBook.iterator().forEachRemaining(addresses::add); + Collections.shuffle(addresses, random); + + boolean atLeastOneEventCreated = false; + + for (final Address address : addresses) { + if (advancingClock) { + time.tick(Duration.ofMillis(10)); + } + + transactionSupplier.set(generateRandomTransactions(random)); + + final NodeId nodeId = address.getNodeId(); + final TipsetEventCreator eventCreator = nodes.get(nodeId).tipsetEventCreator; + + final GossipEvent event = eventCreator.maybeCreateEvent(); + + // It's possible a node may not be able to create an event. But we are guaranteed + // to be able to create at least one event per cycle. + if (event == null) { + continue; + } + atLeastOneEventCreated = true; + + linkAndDistributeEvent(nodes, events, event); + + if (advancingClock) { + assertEquals(event.getHashedData().getTimeCreated(), time.now()); + } + validateNewEvent(events, event, transactionSupplier.get(), nodes.get(nodeId), false); + } + + assertTrue(atLeastOneEventCreated); + } + } + + /** + * Each node creates many events in a row without allowing others to take a turn. Eventually, a node should be + * unable to create another event without first receiving an event from another node. + */ + @ParameterizedTest + @ValueSource(booleans = {false, true}) + @DisplayName("Create Many Events In A Row Test") + void createManyEventsInARowTest(final boolean advancingClock) { + final Random random = getRandomPrintSeed(); + + final int networkSize = 10; + + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(networkSize).build(); + + final FakeTime time = new FakeTime(); + + final AtomicReference transactionSupplier = new AtomicReference<>(); + + final Map nodes = + buildSimulatedNodes(random, time, addressBook, transactionSupplier::get); + + final Map events = new HashMap<>(); + + for (int eventIndex = 0; eventIndex < 100; eventIndex++) { + for (final Address address : addressBook) { + + int count = 0; + while (true) { + if (advancingClock) { + time.tick(Duration.ofMillis(10)); + } + + transactionSupplier.set(generateRandomTransactions(random)); + + final NodeId nodeId = address.getNodeId(); + final TipsetEventCreator eventCreator = nodes.get(nodeId).tipsetEventCreator; + + final GossipEvent event = eventCreator.maybeCreateEvent(); + + if (count == 0) { + // The first time we attempt to create an event we should be able to do so. + assertNotNull(event); + } else if (event == null) { + // we can't create any more events + break; + } + + linkAndDistributeEvent(nodes, events, event); + + if (advancingClock) { + assertEquals(event.getHashedData().getTimeCreated(), time.now()); + } + validateNewEvent(events, event, transactionSupplier.get(), nodes.get(nodeId), false); + + // At best, we can create a genesis event and one event per node in the network. + // We are unlikely to create this many, but we definitely shouldn't be able to go beyond this. + assertTrue(count < networkSize); + count++; + } + } + } + } + + /** + * The tipset algorithm must still build on top of zero weight nodes, even though they don't help consensus to + * advance. 
+ */ + @ParameterizedTest + @ValueSource(booleans = {false, true}) + @DisplayName("Zero Weight Node Test") + void zeroWeightNodeTest(final boolean advancingClock) { + final Random random = getRandomPrintSeed(); + + final int networkSize = 10; + + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(networkSize).build(); + + final NodeId zeroWeightNode = addressBook.getNodeId(0); + + for (final Address address : addressBook) { + if (address.getNodeId().equals(zeroWeightNode)) { + addressBook.add(address.copySetWeight(0)); + } else { + addressBook.add(address.copySetWeight(1)); + } + } + + final FakeTime time = new FakeTime(); + + final AtomicReference transactionSupplier = new AtomicReference<>(); + + final Map nodes = + buildSimulatedNodes(random, time, addressBook, transactionSupplier::get); + + final Map events = new HashMap<>(); + + int zeroWeightNodeOtherParentCount = 0; + + for (int eventIndex = 0; eventIndex < 100; eventIndex++) { + + final List
addresses = new ArrayList<>(); + addressBook.iterator().forEachRemaining(addresses::add); + Collections.shuffle(addresses, random); + + boolean atLeastOneEventCreated = false; + + for (final Address address : addresses) { + if (advancingClock) { + time.tick(Duration.ofMillis(10)); + } + + transactionSupplier.set(generateRandomTransactions(random)); + + final NodeId nodeId = address.getNodeId(); + final TipsetEventCreator eventCreator = nodes.get(nodeId).tipsetEventCreator; + + final GossipEvent event = eventCreator.maybeCreateEvent(); + + // It's possible a node may not be able to create an event. But we are guaranteed + // to be able to create at least one event per cycle. + if (event == null) { + continue; + } + atLeastOneEventCreated = true; + + final NodeId otherId = event.getUnhashedData().getOtherId(); + if (otherId != null && otherId.equals(zeroWeightNode)) { + zeroWeightNodeOtherParentCount++; + } + + linkAndDistributeEvent(nodes, events, event); + + if (advancingClock) { + assertEquals(event.getHashedData().getTimeCreated(), time.now()); + } + validateNewEvent(events, event, transactionSupplier.get(), nodes.get(nodeId), false); + } + + assertTrue(atLeastOneEventCreated); + } + + // This is just a heuristic. When running this, I typically see numbers around 100. + // Essentially, we need to make sure that we are choosing the zero weight node's events + // as other parents. Precisely how often is less important to this test, as long as we are + // doing it at least some of the time. + assertTrue(zeroWeightNodeOtherParentCount > 20); + } + + /** + * The tipset algorithm must still build on top of zero weight nodes, even though they don't help consensus to + * advance. Further disadvantage the zero weight node by delaying the propagation of its events, so that others find + * that they do not get transitive tipset score improvements by using it. + */ + @ParameterizedTest + @ValueSource(booleans = {false, true}) + @DisplayName("Zero Weight Slow Node Test") + void zeroWeightSlowNodeTest(final boolean advancingClock) { + final Random random = getRandomPrintSeed(); + + final int networkSize = 10; + + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(networkSize).build(); + + final NodeId zeroWeightNode = addressBook.getNodeId(0); + + for (final Address address : addressBook) { + if (address.getNodeId().equals(zeroWeightNode)) { + addressBook.add(address.copySetWeight(0)); + } else { + addressBook.add(address.copySetWeight(1)); + } + } + + final FakeTime time = new FakeTime(); + + final AtomicReference transactionSupplier = new AtomicReference<>(); + + final Map nodes = + buildSimulatedNodes(random, time, addressBook, transactionSupplier::get); + + final Map events = new HashMap<>(); + final List slowNodeEvents = new ArrayList<>(); + int zeroWeightNodeOtherParentCount = 0; + + for (int eventIndex = 0; eventIndex < 100; eventIndex++) { + + final List
addresses = new ArrayList<>(); + addressBook.iterator().forEachRemaining(addresses::add); + Collections.shuffle(addresses, random); + + boolean atLeastOneEventCreated = false; + + for (final Address address : addresses) { + if (advancingClock) { + time.tick(Duration.ofMillis(10)); + } + + transactionSupplier.set(generateRandomTransactions(random)); + + final NodeId nodeId = address.getNodeId(); + final TipsetEventCreator eventCreator = nodes.get(nodeId).tipsetEventCreator; + + final GossipEvent event = eventCreator.maybeCreateEvent(); + + // It's possible a node may not be able to create an event. But we are guaranteed + // to be able to create at least one event per cycle. + if (event == null) { + continue; + } + atLeastOneEventCreated = true; + + final NodeId otherId = event.getUnhashedData().getOtherId(); + if (otherId != null && otherId.equals(zeroWeightNode)) { + zeroWeightNodeOtherParentCount++; + } + + if (nodeId.equals(zeroWeightNode)) { + if (random.nextDouble() < 0.1 || slowNodeEvents.size() > 10) { + // Once in a while, take all the slow events and distribute them. + for (final EventImpl slowEvent : slowNodeEvents) { + distributeEvent(nodes, slowEvent); + } + slowNodeEvents.clear(); + linkAndDistributeEvent(nodes, events, event); + } else { + // Most of the time, we don't immediately distribute the slow events. + final EventImpl eventImpl = linkEvent(nodes, events, event); + slowNodeEvents.add(eventImpl); + } + } else { + // immediately distribute all events not created by the zero stake node + linkAndDistributeEvent(nodes, events, event); + } + + if (advancingClock) { + assertEquals(event.getHashedData().getTimeCreated(), time.now()); + } + validateNewEvent(events, event, transactionSupplier.get(), nodes.get(nodeId), true); + } + + assertTrue(atLeastOneEventCreated); + } + + // This is just a heuristic. When running this, I typically see numbers around 10. + // Essentially, we need to make sure that we are choosing the zero weight node's events + // as other parents. Precisely how often is less important to this test, as long as we are + // doing it at least some of the time. + assertTrue(zeroWeightNodeOtherParentCount > 1); + } +} diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetTests.java new file mode 100644 index 000000000000..664810a9c0b0 --- /dev/null +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetTests.java @@ -0,0 +1,216 @@ +/* + * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.swirlds.platform.test.event.tipset;
+
+import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.swirlds.common.system.NodeId;
+import com.swirlds.common.system.address.Address;
+import com.swirlds.common.system.address.AddressBook;
+import com.swirlds.common.test.RandomAddressBookGenerator;
+import com.swirlds.common.test.RandomAddressBookGenerator.WeightDistributionStrategy;
+import com.swirlds.platform.event.tipset.Tipset;
+import com.swirlds.platform.event.tipset.TipsetAdvancementWeight;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+@DisplayName("Tipset Tests")
+class TipsetTests {
+
+    private static void validateTipset(final Tipset tipset, final Map<NodeId, Long> expectedTipGenerations) {
+        for (final NodeId nodeId : expectedTipGenerations.keySet()) {
+            assertEquals(expectedTipGenerations.get(nodeId), tipset.getTipGenerationForNode(nodeId));
+        }
+    }
+
+    @Test
+    @DisplayName("Advancement Test")
+    void advancementTest() {
+        final Random random = getRandomPrintSeed();
+
+        final int nodeCount = 100;
+
+        final AddressBook addressBook =
+                new RandomAddressBookGenerator(random).setSize(nodeCount).build();
+
+        final Tipset tipset = new Tipset(addressBook);
+        assertEquals(nodeCount, tipset.size());
+
+        final Map<NodeId, Long> expected = new HashMap<>();
+
+        for (int iteration = 0; iteration < 10; iteration++) {
+            for (int creator = 0; creator < 100; creator++) {
+                final NodeId creatorId = addressBook.getNodeId(creator);
+                final long generation = random.nextLong(1, 100);
+
+                tipset.advance(creatorId, generation);
+                expected.put(creatorId, Math.max(generation, expected.getOrDefault(creatorId, 0L)));
+                validateTipset(tipset, expected);
+            }
+        }
+    }
+
+    @Test
+    @DisplayName("Merge Test")
+    void mergeTest() {
+        final Random random = getRandomPrintSeed();
+
+        final int nodeCount = 100;
+
+        final AddressBook addressBook =
+                new RandomAddressBookGenerator(random).setSize(nodeCount).build();
+
+        for (int count = 0; count < 10; count++) {
+            final List<Tipset> tipsets = new ArrayList<>();
+            final Map<NodeId, Long> expected = new HashMap<>();
+
+            for (int tipsetIndex = 0; tipsetIndex < 10; tipsetIndex++) {
+                final Tipset tipset = new Tipset(addressBook);
+                for (int creator = 0; creator < nodeCount; creator++) {
+                    final NodeId creatorId = addressBook.getNodeId(creator);
+                    final long generation = random.nextLong(1, 100);
+                    tipset.advance(creatorId, generation);
+                    expected.put(creatorId, Math.max(generation, expected.getOrDefault(creatorId, 0L)));
+                }
+                tipsets.add(tipset);
+            }
+
+            final Tipset merged = Tipset.merge(tipsets);
+            validateTipset(merged, expected);
+        }
+    }
+
+    @Test
+    @DisplayName("getAdvancementCount() Test")
+    void getAdvancementCountTest() {
+        final Random random = getRandomPrintSeed();
+
+        final int nodeCount = 100;
+
+        final AddressBook addressBook = new RandomAddressBookGenerator(random)
+                .setSize(nodeCount)
+                .setAverageWeight(1)
+                .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED)
+                .build();
+
+        final NodeId selfId = addressBook.getNodeId(random.nextInt(nodeCount));
+
+        final Tipset initialTipset = new Tipset(addressBook);
+        for (long creator = 0; creator < nodeCount; creator++) {
+            final NodeId creatorId = addressBook.getNodeId((int) creator);
+            final long generation = random.nextLong(1, 100);
+            initialTipset.advance(creatorId, generation);
+        }
+
+
// Merging the tipset with itself will result in a copy + final Tipset comparisonTipset = Tipset.merge(List.of(initialTipset)); + assertEquals(initialTipset.size(), comparisonTipset.size()); + for (int creator = 0; creator < 100; creator++) { + final NodeId creatorId = addressBook.getNodeId(creator); + assertEquals( + initialTipset.getTipGenerationForNode(creatorId), + comparisonTipset.getTipGenerationForNode(creatorId)); + } + + // Cause the comparison tipset to advance in a random way + for (int entryIndex = 0; entryIndex < 100; entryIndex++) { + final long creator = random.nextLong(100); + final NodeId creatorId = addressBook.getNodeId((int) creator); + final long generation = random.nextLong(1, 100); + + comparisonTipset.advance(creatorId, generation); + } + + long expectedAdvancementCount = 0; + for (int i = 0; i < 100; i++) { + final NodeId nodeId = addressBook.getNodeId(i); + if (nodeId.equals(selfId)) { + // Self advancements are not counted + continue; + } + if (initialTipset.getTipGenerationForNode(nodeId) < comparisonTipset.getTipGenerationForNode(nodeId)) { + expectedAdvancementCount++; + } + } + assertEquals( + TipsetAdvancementWeight.of(expectedAdvancementCount, 0), + initialTipset.getTipAdvancementWeight(selfId, comparisonTipset)); + } + + @Test + @DisplayName("Weighted getAdvancementCount() Test") + void weightedGetAdvancementCountTest() { + final Random random = getRandomPrintSeed(); + final int nodeCount = 100; + + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(nodeCount).build(); + + final Map weights = new HashMap<>(); + for (final Address address : addressBook) { + weights.put(address.getNodeId(), address.getWeight()); + } + + final NodeId selfId = addressBook.getNodeId(random.nextInt(nodeCount)); + + final Tipset initialTipset = new Tipset(addressBook); + for (long creator = 0; creator < 100; creator++) { + final NodeId creatorId = addressBook.getNodeId((int) creator); + final long generation = random.nextLong(1, 100); + initialTipset.advance(creatorId, generation); + } + + // Merging the tipset with itself will result in a copy + final Tipset comparisonTipset = Tipset.merge(List.of(initialTipset)); + assertEquals(initialTipset.size(), comparisonTipset.size()); + for (int creator = 0; creator < 100; creator++) { + final NodeId creatorId = addressBook.getNodeId(creator); + assertEquals( + initialTipset.getTipGenerationForNode(creatorId), + comparisonTipset.getTipGenerationForNode(creatorId)); + } + + // Cause the comparison tipset to advance in a random way + for (final Address address : addressBook) { + final long generation = random.nextLong(1, 100); + + comparisonTipset.advance(address.getNodeId(), generation); + } + + long expectedAdvancementCount = 0; + for (final Address address : addressBook) { + final NodeId nodeId = address.getNodeId(); + if (nodeId.equals(selfId)) { + // Self advancements are not counted + continue; + } + if (initialTipset.getTipGenerationForNode(nodeId) < comparisonTipset.getTipGenerationForNode(nodeId)) { + expectedAdvancementCount += weights.get(nodeId); + } + } + + assertEquals( + TipsetAdvancementWeight.of(expectedAdvancementCount, 0), + initialTipset.getTipAdvancementWeight(selfId, comparisonTipset)); + } +} diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetTrackerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetTrackerTests.java new file mode 
100644 index 000000000000..8ebe2e9d6835 --- /dev/null +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetTrackerTests.java @@ -0,0 +1,151 @@ +/* + * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.test.event.tipset; + +import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed; +import static com.swirlds.common.test.RandomUtils.randomHash; +import static com.swirlds.platform.event.tipset.Tipset.merge; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; + +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.Address; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.common.test.RandomAddressBookGenerator; +import com.swirlds.platform.event.EventDescriptor; +import com.swirlds.platform.event.tipset.Tipset; +import com.swirlds.platform.event.tipset.TipsetTracker; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +@DisplayName("TipsetTracker Tests") +class TipsetTrackerTests { + + private static void assertTipsetEquality( + @NonNull final AddressBook addressBook, @NonNull final Tipset expected, @NonNull final Tipset actual) { + assertEquals(expected.size(), actual.size()); + + for (final Address address : addressBook) { + assertEquals( + expected.getTipGenerationForNode(address.getNodeId()), + actual.getTipGenerationForNode(address.getNodeId())); + } + } + + @Test + @DisplayName("Basic Behavior Test") + void basicBehaviorTest() { + final Random random = getRandomPrintSeed(0); + + final int nodeCount = random.nextInt(10, 20); + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(nodeCount).build(); + + final Map latestEvents = new HashMap<>(); + final Map expectedTipsets = new HashMap<>(); + + final TipsetTracker tracker = new TipsetTracker(addressBook); + + for (int eventIndex = 0; eventIndex < 1000; eventIndex++) { + + final NodeId creator = addressBook.getNodeId(random.nextInt(nodeCount)); + final long generation; + if (latestEvents.containsKey(creator)) { + generation = latestEvents.get(creator).getGeneration() + 1; + } else { + generation = 1; + } + + final EventDescriptor selfParent = latestEvents.get(creator); + final EventDescriptor fingerprint = new EventDescriptor(randomHash(random), creator, generation); + latestEvents.put(creator, fingerprint); + + // Select some nodes we'd like to be our parents. 
+ final Set desiredParents = new HashSet<>(); + final int maxParentCount = random.nextInt(nodeCount); + for (int parentIndex = 0; parentIndex < maxParentCount; parentIndex++) { + final NodeId parent = addressBook.getNodeId(random.nextInt(nodeCount)); + + // We are only trying to generate a random number of parents, the exact count is unimportant. + // So it doesn't matter if the actual number of parents is less than the number we requested. + if (parent.equals(creator)) { + continue; + } + desiredParents.add(parent); + } + + // Select the actual parents. + final List parentFingerprints = new ArrayList<>(desiredParents.size()); + if (selfParent != null) { + parentFingerprints.add(selfParent); + } + for (final NodeId parent : desiredParents) { + final EventDescriptor parentFingerprint = latestEvents.get(parent); + if (parentFingerprint != null) { + parentFingerprints.add(parentFingerprint); + } + } + + final Tipset newTipset = tracker.addEvent(fingerprint, parentFingerprints); + assertSame(newTipset, tracker.getTipset(fingerprint)); + + // Now, reconstruct the tipset manually, and make sure it matches what we were expecting. + final List parentTipsets = new ArrayList<>(parentFingerprints.size()); + for (final EventDescriptor parentFingerprint : parentFingerprints) { + parentTipsets.add(expectedTipsets.get(parentFingerprint)); + } + + final Tipset expectedTipset; + if (parentTipsets.isEmpty()) { + expectedTipset = new Tipset(addressBook).advance(creator, generation); + } else { + expectedTipset = merge(parentTipsets).advance(creator, generation); + } + + expectedTipsets.put(fingerprint, expectedTipset); + assertTipsetEquality(addressBook, expectedTipset, newTipset); + } + + // At the very end, we shouldn't see any modified tipsets + for (final EventDescriptor fingerprint : expectedTipsets.keySet()) { + assertTipsetEquality(addressBook, expectedTipsets.get(fingerprint), tracker.getTipset(fingerprint)); + } + + // Slowly advance the minimum generation, we should see tipsets disappear as we go. + long minimumGenerationNonAncient = 0; + while (tracker.size() > 0) { + minimumGenerationNonAncient += random.nextInt(1, 5); + tracker.setMinimumGenerationNonAncient(minimumGenerationNonAncient); + for (final EventDescriptor fingerprint : expectedTipsets.keySet()) { + if (fingerprint.getGeneration() < minimumGenerationNonAncient) { + assertNull(tracker.getTipset(fingerprint)); + } else { + assertTipsetEquality(addressBook, expectedTipsets.get(fingerprint), tracker.getTipset(fingerprint)); + } + } + } + } +} diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetWeightCalculatorTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetWeightCalculatorTests.java new file mode 100644 index 000000000000..4c52e59db977 --- /dev/null +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/tipset/TipsetWeightCalculatorTests.java @@ -0,0 +1,473 @@ +/* + * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.test.event.tipset; + +import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed; +import static com.swirlds.common.test.RandomUtils.randomHash; +import static com.swirlds.platform.Utilities.isSuperMajority; +import static com.swirlds.platform.event.tipset.Tipset.merge; +import static com.swirlds.platform.event.tipset.TipsetAdvancementWeight.ZERO_ADVANCEMENT_WEIGHT; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertSame; + +import com.swirlds.common.context.PlatformContext; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.system.address.Address; +import com.swirlds.common.system.address.AddressBook; +import com.swirlds.common.test.RandomAddressBookGenerator; +import com.swirlds.common.test.RandomAddressBookGenerator.WeightDistributionStrategy; +import com.swirlds.platform.event.EventDescriptor; +import com.swirlds.platform.event.tipset.ChildlessEventTracker; +import com.swirlds.platform.event.tipset.Tipset; +import com.swirlds.platform.event.tipset.TipsetAdvancementWeight; +import com.swirlds.platform.event.tipset.TipsetTracker; +import com.swirlds.platform.event.tipset.TipsetWeightCalculator; +import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +@DisplayName("TipsetWeightCalculator Tests") +class TipsetWeightCalculatorTests { + + @Test + @DisplayName("Basic Behavior Test") + void basicBehaviorTest() { + final Random random = getRandomPrintSeed(); + final int nodeCount = 5; + + final Map latestEvents = new HashMap<>(); + + final AddressBook addressBook = + new RandomAddressBookGenerator(random).setSize(nodeCount).build(); + + final Map weightMap = new HashMap<>(); + long totalWeight = 0; + for (final Address address : addressBook) { + weightMap.put(address.getNodeId(), address.getWeight()); + totalWeight += address.getWeight(); + } + + final NodeId selfId = addressBook.getNodeId(random.nextInt(nodeCount)); + + final PlatformContext platformContext = + TestPlatformContextBuilder.create().build(); + + final TipsetTracker builder = new TipsetTracker(addressBook); + final ChildlessEventTracker childlessEventTracker = new ChildlessEventTracker(); + final TipsetWeightCalculator calculator = + new TipsetWeightCalculator(platformContext, addressBook, selfId, builder, childlessEventTracker); + + List previousParents = List.of(); + TipsetAdvancementWeight runningAdvancementScore = ZERO_ADVANCEMENT_WEIGHT; + Tipset previousSnapshot = calculator.getSnapshot(); + + for (int eventIndex = 0; eventIndex < 1000; eventIndex++) { + final NodeId creator = addressBook.getNodeId(random.nextInt(nodeCount)); + final long generation; + if 
(latestEvents.containsKey(creator)) { + generation = latestEvents.get(creator).getGeneration() + 1; + } else { + generation = 1; + } + + final EventDescriptor selfParent = latestEvents.get(creator); + final EventDescriptor fingerprint = new EventDescriptor(randomHash(random), creator, generation); + latestEvents.put(creator, fingerprint); + + // Select some nodes we'd like to be our parents. + final Set desiredParents = new HashSet<>(); + final int maxParentCount = random.nextInt(nodeCount); + for (int parentIndex = 0; parentIndex < maxParentCount; parentIndex++) { + final NodeId parent = addressBook.getNodeId(random.nextInt(nodeCount)); + + // We are only trying to generate a random number of parents, the exact count is unimportant. + // So it doesn't matter if the actual number of parents is less than the number we requested. + if (parent.equals(creator)) { + continue; + } + desiredParents.add(parent); + } + + // Select the actual parents. + final List parentFingerprints = new ArrayList<>(desiredParents.size()); + if (selfParent != null) { + parentFingerprints.add(selfParent); + } + for (final NodeId parent : desiredParents) { + final EventDescriptor parentFingerprint = latestEvents.get(parent); + if (parentFingerprint != null) { + parentFingerprints.add(parentFingerprint); + } + } + + builder.addEvent(fingerprint, parentFingerprints); + + if (creator != selfId) { + // The following validation only needs to happen for events created by self + + // Only do previous parent validation if we create two or more events in a row. + previousParents = List.of(); + + continue; + } + + // Manually calculate the advancement score. + final List parentTipsets = new ArrayList<>(parentFingerprints.size()); + for (final EventDescriptor parentFingerprint : parentFingerprints) { + parentTipsets.add(builder.getTipset(parentFingerprint)); + } + + final Tipset newTipset; + if (parentTipsets.isEmpty()) { + newTipset = new Tipset(addressBook).advance(creator, generation); + } else { + newTipset = merge(parentTipsets).advance(creator, generation); + } + + final TipsetAdvancementWeight expectedAdvancementScoreChange = + previousSnapshot.getTipAdvancementWeight(selfId, newTipset).minus(runningAdvancementScore); + + // For events created by "this" node, check that the calculator is updated correctly. + final TipsetAdvancementWeight advancementScoreChange = + calculator.addEventAndGetAdvancementWeight(fingerprint); + + assertEquals(expectedAdvancementScoreChange, advancementScoreChange); + + // Special case: if we create more than one event in a row and our current parents are a + // subset of the previous parents, then we should expect an advancement score of zero. + boolean subsetOfPreviousParents = true; + for (final EventDescriptor parentFingerprint : parentFingerprints) { + if (!previousParents.contains(parentFingerprint)) { + subsetOfPreviousParents = false; + break; + } + } + if (subsetOfPreviousParents) { + assertEquals(ZERO_ADVANCEMENT_WEIGHT, advancementScoreChange); + } + previousParents = parentFingerprints; + + // Validate that the snapshot advances correctly. + runningAdvancementScore = runningAdvancementScore.plus(advancementScoreChange); + if (isSuperMajority(runningAdvancementScore.advancementWeight() + weightMap.get(selfId), totalWeight)) { + // The snapshot should have been updated. 
+ assertNotSame(previousSnapshot, calculator.getSnapshot()); + previousSnapshot = calculator.getSnapshot(); + runningAdvancementScore = ZERO_ADVANCEMENT_WEIGHT; + } else { + // The snapshot should have not been updated. + assertSame(previousSnapshot, calculator.getSnapshot()); + } + } + } + + @Test + @DisplayName("Bully Test") + void bullyTest() { + final Random random = getRandomPrintSeed(); + final int nodeCount = 4; + + final AddressBook addressBook = new RandomAddressBookGenerator(random) + .setSize(nodeCount) + .setAverageWeight(1) + .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) + .build(); + + // In this test, we simulate from the perspective of node A. All nodes have 1 weight. + final NodeId nodeA = addressBook.getNodeId(0); + final NodeId nodeB = addressBook.getNodeId(1); + final NodeId nodeC = addressBook.getNodeId(2); + final NodeId nodeD = addressBook.getNodeId(3); + + final PlatformContext platformContext = + TestPlatformContextBuilder.create().build(); + + final TipsetTracker tracker = new TipsetTracker(addressBook); + final ChildlessEventTracker childlessEventTracker = new ChildlessEventTracker(); + final TipsetWeightCalculator calculator = + new TipsetWeightCalculator(platformContext, addressBook, nodeA, tracker, childlessEventTracker); + + final Tipset snapshot1 = calculator.getSnapshot(); + + // Each node creates an event. + final EventDescriptor eventA1 = new EventDescriptor(randomHash(random), nodeA, 1); + tracker.addEvent(eventA1, List.of()); + childlessEventTracker.addEvent(eventA1, List.of()); + final EventDescriptor eventB1 = new EventDescriptor(randomHash(random), nodeB, 1); + tracker.addEvent(eventB1, List.of()); + childlessEventTracker.addEvent(eventB1, List.of()); + final EventDescriptor eventC1 = new EventDescriptor(randomHash(random), nodeC, 1); + tracker.addEvent(eventC1, List.of()); + childlessEventTracker.addEvent(eventC1, List.of()); + final EventDescriptor eventD1 = new EventDescriptor(randomHash(random), nodeD, 1); + tracker.addEvent(eventD1, List.of()); + childlessEventTracker.addEvent(eventD1, List.of()); + + assertEquals(ZERO_ADVANCEMENT_WEIGHT, calculator.getTheoreticalAdvancementWeight(List.of())); + assertEquals(ZERO_ADVANCEMENT_WEIGHT, calculator.addEventAndGetAdvancementWeight(eventA1)); + assertSame(snapshot1, calculator.getSnapshot()); + + // Each node creates another event. All nodes use all available other parents except the event from D. 
+        final EventDescriptor eventA2 = new EventDescriptor(randomHash(random), nodeA, 2);
+        tracker.addEvent(eventA2, List.of(eventA1, eventB1, eventC1));
+        childlessEventTracker.addEvent(eventA2, List.of(eventA1, eventB1, eventC1));
+        final EventDescriptor eventB2 = new EventDescriptor(randomHash(random), nodeB, 2);
+        tracker.addEvent(eventB2, List.of(eventA1, eventB1, eventC1));
+        childlessEventTracker.addEvent(eventB2, List.of(eventA1, eventB1, eventC1));
+        final EventDescriptor eventC2 = new EventDescriptor(randomHash(random), nodeC, 2);
+        tracker.addEvent(eventC2, List.of(eventA1, eventB1, eventC1));
+        childlessEventTracker.addEvent(eventC2, List.of(eventA1, eventB1, eventC1));
+        final EventDescriptor eventD2 = new EventDescriptor(randomHash(random), nodeD, 2);
+        tracker.addEvent(eventD2, List.of(eventA1, eventB1, eventC1, eventD1));
+        childlessEventTracker.addEvent(eventD2, List.of(eventA1, eventB1, eventC1, eventD1));
+
+        assertEquals(
+                TipsetAdvancementWeight.of(2, 0),
+                calculator.getTheoreticalAdvancementWeight(List.of(eventA1, eventB1, eventC1)));
+        assertEquals(TipsetAdvancementWeight.of(2, 0), calculator.addEventAndGetAdvancementWeight(eventA2));
+
+        // This should have been enough to advance the snapshot window by 1.
+        final Tipset snapshot2 = calculator.getSnapshot();
+        assertNotSame(snapshot1, snapshot2);
+
+        // D should have a bully score of 1, all others a score of 0.
+        assertEquals(0, calculator.getBullyScoreForNode(nodeA));
+        assertEquals(0, calculator.getBullyScoreForNode(nodeB));
+        assertEquals(0, calculator.getBullyScoreForNode(nodeC));
+        assertEquals(1, calculator.getBullyScoreForNode(nodeD));
+        assertEquals(1, calculator.getMaxBullyScore());
+
+        // Create another batch of events where D is bullied.
+        final EventDescriptor eventA3 = new EventDescriptor(randomHash(random), nodeA, 3);
+        tracker.addEvent(eventA3, List.of(eventA2, eventB2, eventC2));
+        childlessEventTracker.addEvent(eventA3, List.of(eventA2, eventB2, eventC2));
+        final EventDescriptor eventB3 = new EventDescriptor(randomHash(random), nodeB, 3);
+        tracker.addEvent(eventB3, List.of(eventA2, eventB2, eventC2));
+        childlessEventTracker.addEvent(eventB3, List.of(eventA2, eventB2, eventC2));
+        final EventDescriptor eventC3 = new EventDescriptor(randomHash(random), nodeC, 3);
+        tracker.addEvent(eventC3, List.of(eventA2, eventB2, eventC2));
+        childlessEventTracker.addEvent(eventC3, List.of(eventA2, eventB2, eventC2));
+        final EventDescriptor eventD3 = new EventDescriptor(randomHash(random), nodeD, 3);
+        tracker.addEvent(eventD3, List.of(eventA2, eventB2, eventC2, eventD2));
+        childlessEventTracker.addEvent(eventD3, List.of(eventA2, eventB2, eventC2, eventD2));
+
+        assertEquals(
+                TipsetAdvancementWeight.of(2, 0),
+                calculator.getTheoreticalAdvancementWeight(List.of(eventA2, eventB2, eventC2)));
+        assertEquals(TipsetAdvancementWeight.of(2, 0), calculator.addEventAndGetAdvancementWeight(eventA3));
+
+        final Tipset snapshot3 = calculator.getSnapshot();
+        assertNotSame(snapshot2, snapshot3);
+
+        // D should have a bully score of 2, all others a score of 0.
+        assertEquals(0, calculator.getBullyScoreForNode(nodeA));
+        assertEquals(0, calculator.getBullyScoreForNode(nodeB));
+        assertEquals(0, calculator.getBullyScoreForNode(nodeC));
+        assertEquals(2, calculator.getBullyScoreForNode(nodeD));
+        assertEquals(2, calculator.getMaxBullyScore());
+
+        // Create a batch of events that don't bully D. Let's all bully C, because C is a jerk.
+ final EventDescriptor eventA4 = new EventDescriptor(randomHash(random), nodeA, 4); + tracker.addEvent(eventA4, List.of(eventA3, eventB3, eventD3)); + childlessEventTracker.addEvent(eventA4, List.of(eventA3, eventB3, eventD3)); + final EventDescriptor eventB4 = new EventDescriptor(randomHash(random), nodeB, 4); + tracker.addEvent(eventB4, List.of(eventA3, eventB3, eventD3)); + childlessEventTracker.addEvent(eventB4, List.of(eventA3, eventB3, eventD3)); + final EventDescriptor eventC4 = new EventDescriptor(randomHash(random), nodeC, 4); + tracker.addEvent(eventC4, List.of(eventA3, eventB3, eventC3, eventD3)); + childlessEventTracker.addEvent(eventC4, List.of(eventA3, eventB3, eventC3, eventD3)); + final EventDescriptor eventD4 = new EventDescriptor(randomHash(random), nodeD, 4); + tracker.addEvent(eventD4, List.of(eventA3, eventB3, eventD3)); + childlessEventTracker.addEvent(eventD4, List.of(eventA3, eventB3, eventD3)); + + assertEquals( + TipsetAdvancementWeight.of(2, 0), + calculator.getTheoreticalAdvancementWeight(List.of(eventA3, eventB3, eventD3))); + assertEquals(TipsetAdvancementWeight.of(2, 0), calculator.addEventAndGetAdvancementWeight(eventA4)); + + final Tipset snapshot4 = calculator.getSnapshot(); + assertNotSame(snapshot3, snapshot4); + + // Now, all nodes should have a bully score of 0 except for C, which should have a score of 1. + assertEquals(0, calculator.getBullyScoreForNode(nodeA)); + assertEquals(0, calculator.getBullyScoreForNode(nodeB)); + assertEquals(1, calculator.getBullyScoreForNode(nodeC)); + assertEquals(0, calculator.getBullyScoreForNode(nodeD)); + assertEquals(1, calculator.getMaxBullyScore()); + + // Stop bullying C. D stops creating events. + final EventDescriptor eventA5 = new EventDescriptor(randomHash(random), nodeA, 5); + tracker.addEvent(eventA5, List.of(eventA4, eventB4, eventC4, eventD4)); + childlessEventTracker.addEvent(eventA5, List.of(eventA4, eventB4, eventC4, eventD4)); + final EventDescriptor eventB5 = new EventDescriptor(randomHash(random), nodeB, 5); + tracker.addEvent(eventB5, List.of(eventA4, eventB4, eventC4, eventD4)); + childlessEventTracker.addEvent(eventB5, List.of(eventA4, eventB4, eventC4, eventD4)); + final EventDescriptor eventC5 = new EventDescriptor(randomHash(random), nodeC, 5); + tracker.addEvent(eventC5, List.of(eventA4, eventB4, eventC4, eventD4)); + childlessEventTracker.addEvent(eventC5, List.of(eventA4, eventB4, eventC4, eventD4)); + + assertEquals( + TipsetAdvancementWeight.of(3, 0), + calculator.getTheoreticalAdvancementWeight(List.of(eventA4, eventB4, eventC4, eventD4))); + assertEquals(TipsetAdvancementWeight.of(3, 0), calculator.addEventAndGetAdvancementWeight(eventA5)); + + final Tipset snapshot5 = calculator.getSnapshot(); + assertNotSame(snapshot4, snapshot5); + + assertEquals(0, calculator.getBullyScoreForNode(nodeA)); + assertEquals(0, calculator.getBullyScoreForNode(nodeB)); + assertEquals(0, calculator.getBullyScoreForNode(nodeC)); + assertEquals(0, calculator.getBullyScoreForNode(nodeD)); + assertEquals(0, calculator.getMaxBullyScore()); + + // D still is not creating events. Since there is no legal event from D to use as a parent, this doesn't + // count as bullying. 
+ final EventDescriptor eventA6 = new EventDescriptor(randomHash(random), nodeA, 6); + tracker.addEvent(eventA6, List.of(eventA5, eventB5, eventC5)); + childlessEventTracker.addEvent(eventA6, List.of(eventA5, eventB5, eventC5)); + final EventDescriptor eventB6 = new EventDescriptor(randomHash(random), nodeB, 6); + tracker.addEvent(eventB6, List.of(eventA5, eventB5, eventC5)); + childlessEventTracker.addEvent(eventB6, List.of(eventA5, eventB5, eventC5)); + final EventDescriptor eventC6 = new EventDescriptor(randomHash(random), nodeC, 6); + tracker.addEvent(eventC6, List.of(eventA5, eventB5, eventC5)); + childlessEventTracker.addEvent(eventC6, List.of(eventA5, eventB5, eventC5)); + + assertEquals( + TipsetAdvancementWeight.of(2, 0), + calculator.getTheoreticalAdvancementWeight(List.of(eventA5, eventB5, eventC5))); + assertEquals(TipsetAdvancementWeight.of(2, 0), calculator.addEventAndGetAdvancementWeight(eventA6)); + + final Tipset snapshot6 = calculator.getSnapshot(); + assertNotSame(snapshot5, snapshot6); + + assertEquals(0, calculator.getBullyScoreForNode(nodeA)); + assertEquals(0, calculator.getBullyScoreForNode(nodeB)); + assertEquals(0, calculator.getBullyScoreForNode(nodeC)); + assertEquals(0, calculator.getBullyScoreForNode(nodeD)); + assertEquals(0, calculator.getMaxBullyScore()); + + // Rinse and repeat. + final EventDescriptor eventA7 = new EventDescriptor(randomHash(random), nodeA, 7); + tracker.addEvent(eventA7, List.of(eventA6, eventB6, eventC6)); + childlessEventTracker.addEvent(eventA7, List.of(eventA6, eventB6, eventC6)); + final EventDescriptor eventB7 = new EventDescriptor(randomHash(random), nodeB, 7); + tracker.addEvent(eventB7, List.of(eventA6, eventB6, eventC6)); + childlessEventTracker.addEvent(eventB7, List.of(eventA6, eventB6, eventC6)); + final EventDescriptor eventC7 = new EventDescriptor(randomHash(random), nodeC, 7); + tracker.addEvent(eventC7, List.of(eventA6, eventB6, eventC6)); + childlessEventTracker.addEvent(eventC7, List.of(eventA6, eventB6, eventC6)); + + assertEquals( + TipsetAdvancementWeight.of(2, 0), + calculator.getTheoreticalAdvancementWeight(List.of(eventA6, eventB6, eventC6))); + assertEquals(TipsetAdvancementWeight.of(2, 0), calculator.addEventAndGetAdvancementWeight(eventA7)); + + final Tipset snapshot7 = calculator.getSnapshot(); + assertNotSame(snapshot6, snapshot7); + + assertEquals(0, calculator.getBullyScoreForNode(nodeA)); + assertEquals(0, calculator.getBullyScoreForNode(nodeB)); + assertEquals(0, calculator.getBullyScoreForNode(nodeC)); + assertEquals(0, calculator.getBullyScoreForNode(nodeD)); + assertEquals(0, calculator.getMaxBullyScore()); + } + + @Test + @DisplayName("Zero Stake Node Test") + void zeroWeightNodeTest() { + final Random random = getRandomPrintSeed(); + final int nodeCount = 4; + + final AddressBook addressBook = new RandomAddressBookGenerator(random) + .setSize(nodeCount) + .setAverageWeight(1) + .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED) + .build(); + + // In this test, we simulate from the perspective of node A. + // All nodes have 1 weight except for D, which has 0 weight. 
+ final NodeId nodeA = addressBook.getNodeId(0); + final NodeId nodeB = addressBook.getNodeId(1); + final NodeId nodeC = addressBook.getNodeId(2); + final NodeId nodeD = addressBook.getNodeId(3); + + addressBook.add(addressBook.getAddress(nodeD).copySetWeight(0)); + + final PlatformContext platformContext = + TestPlatformContextBuilder.create().build(); + + final TipsetTracker builder = new TipsetTracker(addressBook); + final ChildlessEventTracker childlessEventTracker = new ChildlessEventTracker(); + final TipsetWeightCalculator calculator = + new TipsetWeightCalculator(platformContext, addressBook, nodeA, builder, childlessEventTracker); + + final Tipset snapshot1 = calculator.getSnapshot(); + + // Each node creates an event. + final EventDescriptor eventA1 = new EventDescriptor(randomHash(random), nodeA, 1); + builder.addEvent(eventA1, List.of()); + final EventDescriptor eventB1 = new EventDescriptor(randomHash(random), nodeB, 1); + builder.addEvent(eventB1, List.of()); + final EventDescriptor eventC1 = new EventDescriptor(randomHash(random), nodeC, 1); + builder.addEvent(eventC1, List.of()); + final EventDescriptor eventD1 = new EventDescriptor(randomHash(random), nodeD, 1); + builder.addEvent(eventD1, List.of()); + + assertEquals(ZERO_ADVANCEMENT_WEIGHT, calculator.getTheoreticalAdvancementWeight(List.of())); + assertEquals(ZERO_ADVANCEMENT_WEIGHT, calculator.addEventAndGetAdvancementWeight(eventA1)); + assertSame(snapshot1, calculator.getSnapshot()); + + // Create a node "on top of" B1. + final EventDescriptor eventA2 = new EventDescriptor(randomHash(random), nodeA, 2); + builder.addEvent(eventA2, List.of(eventA1, eventB1)); + final TipsetAdvancementWeight advancement1 = calculator.addEventAndGetAdvancementWeight(eventA2); + assertEquals(TipsetAdvancementWeight.of(1, 0), advancement1); + + // Snapshot should not have advanced. + assertSame(snapshot1, calculator.getSnapshot()); + + // If we get 1 more advancement point then the snapshot will advance. But building + // on top of a zero stake node will not contribute to this and the snapshot will not + // advance. Build on top of node D. + final EventDescriptor eventA3 = new EventDescriptor(randomHash(random), nodeA, 3); + builder.addEvent(eventA3, List.of(eventA2, eventD1)); + final TipsetAdvancementWeight advancement2 = calculator.addEventAndGetAdvancementWeight(eventA3); + assertEquals(TipsetAdvancementWeight.of(0, 1), advancement2); + + // Snapshot should not have advanced. + assertSame(snapshot1, calculator.getSnapshot()); + + // Now, build on top of C. This should push us into the next snapshot. 
+ final EventDescriptor eventA4 = new EventDescriptor(randomHash(random), nodeA, 4); + builder.addEvent(eventA4, List.of(eventA3, eventC1)); + final TipsetAdvancementWeight advancement3 = calculator.addEventAndGetAdvancementWeight(eventA4); + assertEquals(TipsetAdvancementWeight.of(1, 0), advancement3); + + final Tipset snapshot2 = calculator.getSnapshot(); + assertNotEquals(snapshot1, snapshot2); + assertEquals(snapshot2, builder.getTipset(eventA4)); + } +} diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTestUtils.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTestUtils.java index 0ed0a4103822..a6a02b42cd49 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTestUtils.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTestUtils.java @@ -16,12 +16,15 @@ package com.swirlds.platform.test.eventflow; +import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.TransactionUtils; import com.swirlds.platform.test.event.emitter.EventEmitterFactory; import com.swirlds.platform.test.event.emitter.StandardEventEmitter; import com.swirlds.platform.test.event.source.EventSource; import com.swirlds.platform.test.event.source.StandardEventSource; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; +import java.util.Objects; import java.util.Random; import java.util.function.Supplier; @@ -47,8 +50,20 @@ public static Supplier inaccurateConsensusTimeEstimater(final Random ra return () -> Instant.now().minusMillis(random.nextInt(5000)); } - public static StandardEventEmitter createEventEmitter(final Random random, final int numNodes) { - return createEventEmitter(random, numNodes, 0.0); + /** + * Creates a {@link StandardEventEmitter} with custom event sources that generate transactions with incrementing + * long values such that no transaction is the same as another across all sources. 
+ * + * @param random + * the random instance to use for the {@link EventEmitterFactory} + * @param addressBook + * the address book to use for the {@link EventEmitterFactory} + * @return the event generator + */ + @NonNull + public static StandardEventEmitter createEventEmitter( + @NonNull final Random random, @NonNull final AddressBook addressBook) { + return createEventEmitter(random, addressBook, 0.0); } /** @@ -57,10 +72,16 @@ public static StandardEventEmitter createEventEmitter(final Random random, final * * @param random * the random instance to use for the {@link EventEmitterFactory} + * @param addressBook + * the address book to use for the {@link EventEmitterFactory} + * @param systemTransactionRatio + * the ratio of system transactions to user transactions * @return the event generator */ public static StandardEventEmitter createEventEmitter( - final Random random, final int numNodes, final double systemTransactionRatio) { + @NonNull final Random random, @NonNull final AddressBook addressBook, final double systemTransactionRatio) { + Objects.requireNonNull(random); + Objects.requireNonNull(addressBook); // Create standard event sources that generate events with incrementing transactions instead of random // transactions final Supplier> eventSourceSupplier = () -> new StandardEventSource( @@ -68,7 +89,7 @@ public static StandardEventEmitter createEventEmitter( (r) -> TransactionUtils.incrementingMixedTransactions( r, TX_PER_EVENT_AVG, TX_PER_EVENT_STD_DEV, systemTransactionRatio)); - final EventEmitterFactory eventGeneratorFactory = new EventEmitterFactory(random, numNodes); + final EventEmitterFactory eventGeneratorFactory = new EventEmitterFactory(random, addressBook); eventGeneratorFactory.getSourceFactory().addCustomSource((nodeIndex) -> true, eventSourceSupplier); return eventGeneratorFactory.newStandardEmitter(); } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java index 69f28c30c3e3..00399e8a2da1 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java @@ -35,6 +35,7 @@ import com.swirlds.common.crypto.Hash; import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.stream.EventStreamManager; +import com.swirlds.common.system.BasicSoftwareVersion; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Round; import com.swirlds.common.system.SoftwareVersion; @@ -71,6 +72,7 @@ import com.swirlds.platform.test.NoOpConsensusMetrics; import com.swirlds.test.framework.config.TestConfigBuilder; import com.swirlds.test.framework.context.TestPlatformContextBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.FileNotFoundException; import java.time.Duration; import java.util.ArrayList; @@ -88,7 +90,6 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Stream; -import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.params.provider.Arguments; @@ -160,7 +161,7 @@ void testPreHandle( final Random random = RandomUtils.initRandom(seed); 
         init(random, numNodes, origSwirldState);
-        final EventFlowWrapper wrapper = createEventFlowWrapper(random, numNodes);
+        final EventFlowWrapper wrapper = createEventFlowWrapper(random, addressBook);
 
         // Submits events
         final HashSet transactions = applyToWrapper.apply(wrapper);
@@ -247,7 +248,7 @@ void testPostConsensusHandle(
         final Random random = RandomUtils.initRandom(seed);
         init(random, numNodes, origSwirldState, state);
-        final EventFlowWrapper wrapper = createEventFlowWrapper(random, numNodes);
+        final EventFlowWrapper wrapper = createEventFlowWrapper(random, addressBook);
 
         final List consensusRounds = wrapper.applyConsensusRounds(addressBook, numEvents);
 
@@ -303,7 +304,7 @@ void testSignedStateSettings(
         final Random random = RandomUtils.initRandom(seed);
         init(random, numNodes, origSwirldState, null, prepareConfig(signedStateFreq));
-        final EventFlowWrapper wrapper = createEventFlowWrapper(random, numNodes);
+        final EventFlowWrapper wrapper = createEventFlowWrapper(random, addressBook);
 
         final List consensusRounds = wrapper.applyConsensusRounds(addressBook, numEvents);
 
@@ -366,7 +367,7 @@ void testSignedStateFreezePeriod(
         final AtomicLong freezeRound = new AtomicLong(-1);
 
         init(random, numNodes, origSwirldState, null, prepareConfig(signedStateFreq));
-        final EventFlowWrapper wrapper = createEventFlowWrapper(random, numNodes);
+        final EventFlowWrapper wrapper = createEventFlowWrapper(random, addressBook);
 
         final List consensusRounds = wrapper.applyConsensusRounds(addressBook, numEvents, newConsRound -> {
@@ -463,10 +464,10 @@ void testPreConsensusSystemTransactions(
             final Long seed, final int numNodes, final int numTransactions, final SwirldState origSwirldState) {
         final Random random = RandomUtils.initRandom(seed);
         init(random, numNodes, origSwirldState);
-        final EventFlowWrapper wrapper = createEventFlowWrapper(random, numNodes);
+        final EventFlowWrapper wrapper = createEventFlowWrapper(random, addressBook);
 
         final Set transactions = wrapper.applyPreConsensusEvents(
-                numTransactions, EventFlowTestUtils.createEventEmitter(random, numNodes, 1.0));
+                numTransactions, EventFlowTestUtils.createEventEmitter(random, addressBook, 1.0));
 
         assertEventuallyEquals(
                 transactions.size(),
@@ -497,10 +498,10 @@ void testConsensusSystemTransactions(
             final Long seed, final int numNodes, final int numEvents, final SwirldState origSwirldState) {
         final Random random = RandomUtils.initRandom(seed);
         init(random, numNodes, origSwirldState);
-        final EventFlowWrapper wrapper = createEventFlowWrapper(random, numNodes);
+        final EventFlowWrapper wrapper = createEventFlowWrapper(random, addressBook);
 
         final List consensusRounds = wrapper.applyConsensusRounds(
-                addressBook, numEvents, EventFlowTestUtils.createEventEmitter(random, numNodes, 1.0));
+                addressBook, numEvents, EventFlowTestUtils.createEventEmitter(random, addressBook, 1.0));
 
         final HashSet systemTransactions = extractTransactions((selfNodeId) -> true, consensusRounds);
 
@@ -570,7 +571,6 @@ protected void init(
                 .setSize(numNodes)
                 .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED)
                 .setHashStrategy(RandomAddressBookGenerator.HashStrategy.REAL_HASH)
-                .setSequentialIds(true)
                 .build();
 
         final Configuration configuration = new TestConfigBuilder()
@@ -602,7 +602,6 @@ protected void init(
                 .when(eventStreamManager)
                 .addEvents(anyList());
 
-        final AddressBook addressBook = new RandomAddressBookGenerator().build();
         final State state = getInitialState(swirldState, initialState, addressBook);
 
         systemTransactionTracker = new SystemTransactionTracker();
@@ -627,7 +626,8 @@ protected void init(
                 mock(SwirldStateMetrics.class),
                 transactionConfig,
                 () -> false,
-                state);
+                state,
+                new BasicSoftwareVersion(1));
 
         ConfigurationHolder.getInstance().setConfiguration(config);
         final PlatformContext platformContext =
@@ -684,9 +684,11 @@ private State getInitialState(
         return state;
     }
 
-    protected EventFlowWrapper createEventFlowWrapper(final Random random, final int numNodes) {
+    protected EventFlowWrapper createEventFlowWrapper(
+            @NonNull final Random random, @NonNull final AddressBook addressBook) {
+        // arguments are checked for null in the constructor.
         return new EventFlowWrapper(
-                random, numNodes, preConsensusEventHandler, consensusEventHandler, swirldStateManager);
+                random, addressBook, preConsensusEventHandler, consensusEventHandler, swirldStateManager);
     }
 
     /**
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowWrapper.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowWrapper.java
index d85da06f5672..2c61b6e66bb0 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowWrapper.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowWrapper.java
@@ -36,12 +36,14 @@
 import com.swirlds.platform.test.consensus.ConsensusUtils;
 import com.swirlds.platform.test.event.IndexedEvent;
 import com.swirlds.platform.test.event.emitter.EventEmitter;
+import edu.umd.cs.findbugs.annotations.NonNull;
 import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Objects;
 import java.util.Random;
 import java.util.function.Consumer;
 import java.util.function.Predicate;
@@ -62,21 +64,23 @@ public class EventFlowWrapper {
     /**
      * Creates new instances and starts the handlers.
      *
-     * @param random
-     * @param numNodes
-     * @param preConsensusEventHandler
-     * @param consensusEventHandler
+     * @param random the random number generator
+     * @param addressBook the address book to use
+     * @param preConsensusEventHandler the pre-consensus event handler
+     * @param consensusEventHandler the consensus event handler
      */
     public EventFlowWrapper(
-            final Random random,
-            final int numNodes,
-            final PreConsensusEventHandler preConsensusEventHandler,
-            final ConsensusRoundHandler consensusEventHandler,
-            final SwirldStateManager swirldStateManager) {
-        this.preConsensusEventHandler = preConsensusEventHandler;
-        this.consensusRoundHandler = consensusEventHandler;
-        this.swirldStateManager = swirldStateManager;
-        defaultEventGenerator = EventFlowTestUtils.createEventEmitter(random, numNodes);
+            @NonNull final Random random,
+            @NonNull final AddressBook addressBook,
+            @NonNull final PreConsensusEventHandler preConsensusEventHandler,
+            @NonNull final ConsensusRoundHandler consensusEventHandler,
+            @NonNull final SwirldStateManager swirldStateManager) {
+        Objects.requireNonNull(random);
+        Objects.requireNonNull(addressBook);
+        this.preConsensusEventHandler = Objects.requireNonNull(preConsensusEventHandler);
+        this.consensusRoundHandler = Objects.requireNonNull(consensusEventHandler);
+        this.swirldStateManager = Objects.requireNonNull(swirldStateManager);
+        defaultEventGenerator = EventFlowTestUtils.createEventEmitter(random, addressBook);
         preConsensusEventHandler.start();
         consensusEventHandler.start();
     }
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/SystemTransactionTracker.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/SystemTransactionTracker.java
index 55f5a3c3eef5..0586589b3407 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/SystemTransactionTracker.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/SystemTransactionTracker.java
@@ -25,13 +25,13 @@
 import com.swirlds.platform.components.transaction.system.PreConsensusSystemTransactionConsumer;
 import com.swirlds.platform.components.transaction.system.PreConsensusSystemTransactionTypedHandler;
 import com.swirlds.platform.state.State;
+import edu.umd.cs.findbugs.annotations.NonNull;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import org.checkerframework.checker.nullness.qual.NonNull;
 
 public class SystemTransactionTracker
         implements PreConsensusSystemTransactionConsumer, PostConsensusSystemTransactionConsumer, Failable {
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/TransactionFeeder.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/TransactionFeeder.java
index 2903bbc4fb9b..4409ca67f104 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/TransactionFeeder.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/TransactionFeeder.java
@@ -25,12 +25,12 @@
 import com.swirlds.common.threading.framework.Stoppable;
 import com.swirlds.common.threading.framework.StoppableThread;
 import com.swirlds.common.threading.framework.config.StoppableThreadConfiguration;
+import edu.umd.cs.findbugs.annotations.NonNull;
 import java.time.Duration;
 import java.util.Arrays;
 import java.util.Objects;
 import java.util.Random;
 import java.util.function.Consumer;
-import org.checkerframework.checker.nullness.qual.NonNull;
 
 /**
  * Feeds transactions to a consumer in a worker thread.
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/PartitionedGraphCreator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/PartitionedGraphCreator.java
index 8dca71d7cdeb..7796bbf951f8 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/PartitionedGraphCreator.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/PartitionedGraphCreator.java
@@ -19,6 +19,7 @@
 import static com.swirlds.platform.test.graph.OtherParentMatrixFactory.createBalancedOtherParentMatrix;
 import static com.swirlds.platform.test.graph.OtherParentMatrixFactory.createPartitionedOtherParentAffinityMatrix;
 
+import com.swirlds.common.system.address.AddressBook;
 import com.swirlds.platform.test.event.emitter.EventEmitter;
 import com.swirlds.platform.test.event.generator.GraphGenerator;
 import com.swirlds.platform.test.sync.SyncNode;
@@ -41,6 +42,7 @@ public static void setupPartitionForNode(
             final SyncTestParams params, final SyncNode node, final List nodesInPartition) {
         final EventEmitter emitter = node.getEmitter();
         final GraphGenerator graphGenerator = emitter.getGraphGenerator();
+        final AddressBook addressBook = graphGenerator.getAddressBook();
 
         final List> fullyConnectedMatrix = createBalancedOtherParentMatrix(params.getNumNetworkNodes());
 
@@ -53,7 +55,7 @@ public static void setupPartitionForNode(
         for (int i = 0; i < graphGenerator.getNumberOfSources(); i++) {
             final boolean isSourceInPartition = nodesInPartition.contains(i);
 
-            graphGenerator.getSource(i).setNewEventWeight((r, index, prev) -> {
+            graphGenerator.getSource(addressBook.getNodeId(i)).setNewEventWeight((r, index, prev) -> {
                 if (index < params.getNumCommonEvents() || isSourceInPartition) {
                     return 1.0;
                 } else {
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/SplitForkGraphCreator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/SplitForkGraphCreator.java
index 077b5fea1b20..de344f9c62fb 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/SplitForkGraphCreator.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/graph/SplitForkGraphCreator.java
@@ -20,6 +20,7 @@
 import static com.swirlds.platform.test.graph.OtherParentMatrixFactory.createForcedOtherParentMatrix;
 import static com.swirlds.platform.test.graph.OtherParentMatrixFactory.createShunnedNodeOtherParentAffinityMatrix;
 
+import com.swirlds.common.system.address.AddressBook;
 import com.swirlds.platform.test.event.emitter.StandardEventEmitter;
 import com.swirlds.platform.test.sync.SyncTestParams;
 import java.util.List;
@@ -48,9 +49,11 @@ public static void createSplitForkConditions(
 
     private static void forceNextCreator(
             final SyncTestParams params, final StandardEventEmitter emitter, final int creatorToFork) {
-        for (int i = 0; i < emitter.getGraphGenerator().getNumberOfSources(); i++) {
+        final AddressBook addressBook = emitter.getGraphGenerator().getAddressBook();
+        final int numberOfSources = addressBook.getSize();
+        for (int i = 0; i < numberOfSources; i++) {
             final boolean sourceIsCreatorToFork = i == creatorToFork;
-            emitter.getGraphGenerator().getSource(i).setNewEventWeight((r, index, prev) -> {
+            emitter.getGraphGenerator().getSource(addressBook.getNodeId(i)).setNewEventWeight((r, index, prev) -> {
                 if (index < params.getNumCommonEvents()) {
                     return 1.0;
                 } else if (index == params.getNumCommonEvents() && sourceIsCreatorToFork) {
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/network/OutboundConnectionCreatorTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/network/OutboundConnectionCreatorTest.java
index 07ad80f6abc1..ae930cf1b214 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/network/OutboundConnectionCreatorTest.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/network/OutboundConnectionCreatorTest.java
@@ -43,6 +43,7 @@
 import com.swirlds.platform.network.connectivity.OutboundConnectionCreator;
 import com.swirlds.platform.network.connectivity.SocketFactory;
 import com.swirlds.test.framework.config.TestConfigBuilder;
+import edu.umd.cs.findbugs.annotations.NonNull;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -51,7 +52,6 @@
 import java.net.SocketException;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicBoolean;
-import org.checkerframework.checker.nullness.qual.NonNull;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
@@ -69,7 +69,6 @@ void createConnectionTest() throws IOException, ConstructableRegistryException {
                 .setSize(numNodes)
                 .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED)
                 .setHashStrategy(RandomAddressBookGenerator.HashStrategy.FAKE_HASH)
-                .setSequentialIds(false)
                 .build();
         final int thisNodeIndex = r.nextInt(numNodes);
         final int otherNodeIndex = r.nextInt(numNodes);
@@ -166,7 +165,6 @@ void mismatchedVersionTest() throws IOException, ConstructableRegistryException
                 .setSize(numNodes)
                 .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED)
                 .setHashStrategy(RandomAddressBookGenerator.HashStrategy.FAKE_HASH)
-                .setSequentialIds(false)
                 .build();
         final int thisNodeIndex = r.nextInt(numNodes);
         final int otherNodeIndex = r.nextInt(numNodes);
@@ -226,7 +224,6 @@ void mismatchedVersionIgnoredTest() throws IOException, ConstructableRegistryExc
                 .setSize(numNodes)
                 .setWeightDistributionStrategy(WeightDistributionStrategy.BALANCED)
                 .setHashStrategy(RandomAddressBookGenerator.HashStrategy.FAKE_HASH)
-                .setSequentialIds(false)
                 .build();
         final int thisNodeIndex = r.nextInt(numNodes);
         final int otherNodeIndex = r.nextInt(numNodes);
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashFinderTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashFinderTests.java
index 4b31acab0f08..b0ae21cf46d3 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashFinderTests.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashFinderTests.java
@@ -32,6 +32,7 @@
 import com.swirlds.common.system.NodeId;
 import com.swirlds.platform.dispatch.triggers.flow.StateHashValidityTrigger;
 import com.swirlds.platform.state.iss.internal.ConsensusHashFinder;
+import edu.umd.cs.findbugs.annotations.NonNull;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
@@ -39,7 +40,6 @@
 import java.util.List;
 import java.util.Random;
 import java.util.Set;
-import org.checkerframework.checker.nullness.qual.NonNull;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.DisplayName;
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java
index f68d9e12a4c3..7c094ac9e2f6 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java
@@ -79,7 +79,6 @@ void validSignaturesAfterHashTest() {
                 .setSize(100)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
-                .setSequentialIds(false)
                 .build();
 
         final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration());
@@ -118,7 +117,6 @@ void mixedOrderTest() {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
@@ -317,7 +315,6 @@ void earlyAddTest() {
                 .setSize(100)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
-                .setSequentialIds(false)
                 .build();
         final NodeId selfId = addressBook.getNodeId(0);
 
@@ -381,7 +378,6 @@ void lateAddTest() {
                 .setSize(100)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
-                .setSequentialIds(false)
                 .build();
         final NodeId selfId = addressBook.getNodeId(0);
 
@@ -431,7 +427,6 @@ void shiftBeforeCompleteTest() {
                 .setSize(100)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
-                .setSequentialIds(false)
                 .build();
         final NodeId selfId = addressBook.getNodeId(0);
 
@@ -521,7 +516,6 @@ void catastrophicShiftBeforeCompleteTest() {
                 .setSize(100)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
-                .setSequentialIds(false)
                 .build();
         final NodeId selfId = addressBook.getNodeId(0);
 
@@ -584,7 +578,6 @@ void bigShiftTest() {
                 .setSize(100)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
-                .setSequentialIds(false)
                 .build();
         final NodeId selfId = addressBook.getNodeId(0);
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/RoundHashValidatorTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/RoundHashValidatorTests.java
index 72b459e21d91..2eb02cf29154 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/RoundHashValidatorTests.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/RoundHashValidatorTests.java
@@ -253,7 +253,6 @@ void selfSignatureLastTest(final HashValidityStatus expectedStatus) {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
@@ -300,7 +299,6 @@ void selfSignatureFirstTest(final HashValidityStatus expectedStatus) {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
@@ -345,7 +343,6 @@ void selfSignatureInMiddleTest(final HashValidityStatus expectedStatus) {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
@@ -396,7 +393,6 @@ void timeoutSelfHashTest() {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
@@ -430,7 +426,6 @@ void timeoutSelfHashAndSignaturesTest() {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
@@ -472,7 +467,6 @@ void timeoutSignaturesTest() {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
@@ -518,7 +512,6 @@ void timeoutWithSuperMajorityTest() {
 
         final AddressBook addressBook = new RandomAddressBookGenerator(random)
                 .setSize(Math.max(10, random.nextInt(1000)))
-                .setSequentialIds(false)
                 .setAverageWeight(100)
                 .setWeightStandardDeviation(50)
                 .build();
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/ShadowGraphTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/ShadowGraphTest.java
index fffe26c9388e..12503431b600 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/ShadowGraphTest.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/ShadowGraphTest.java
@@ -29,6 +29,8 @@
 import static org.mockito.Mockito.mock;
 
 import com.swirlds.common.crypto.Hash;
+import com.swirlds.common.system.address.AddressBook;
+import com.swirlds.common.test.RandomAddressBookGenerator;
 import com.swirlds.common.test.RandomUtils;
 import com.swirlds.common.utility.CommonUtils;
 import com.swirlds.platform.event.EventUtils;
@@ -73,6 +75,7 @@ class ShadowGraphTest {
     private Map> genToShadows;
     private long maxGen;
     private StandardEventEmitter emitter;
+    private AddressBook addressBook;
 
     private static Stream graphSizes() {
         return Stream.of(
@@ -92,7 +95,8 @@ public void setup() {
     }
 
     private void initShadowGraph(final Random random, final int numEvents, final int numNodes) {
-        EventEmitterFactory factory = new EventEmitterFactory(random, numNodes);
+        addressBook = new RandomAddressBookGenerator(random).setSize(numNodes).build();
+        final EventEmitterFactory factory = new EventEmitterFactory(random, addressBook);
         emitter = factory.newStandardEmitter();
         shadowGraph = new ShadowGraph(mock(SyncMetrics.class));
 
@@ -725,7 +729,9 @@ void testInitFromEvents_EmptyEventList() {
     @Test
     void testInitFromEvents_EventList() {
         Random random = RandomUtils.getRandomPrintSeed();
-        EventEmitterFactory factory = new EventEmitterFactory(random, 4);
+        final AddressBook addressBook =
+                new RandomAddressBookGenerator(random).setSize(4).build();
+        final EventEmitterFactory factory = new EventEmitterFactory(random, addressBook);
         emitter = factory.newStandardEmitter();
         shadowGraph = new ShadowGraph(mock(SyncMetrics.class));
 
@@ -750,7 +756,9 @@ void testInitFromEvents_EventList() {
     @Test
     void testInitFromEvents_EventListDifferentMinGen() {
         Random random = RandomUtils.getRandomPrintSeed();
-        EventEmitterFactory factory = new EventEmitterFactory(random, 4);
+        final AddressBook addressBook =
+                new RandomAddressBookGenerator(random).setSize(4).build();
+        final EventEmitterFactory factory = new EventEmitterFactory(random, addressBook);
         emitter = factory.newStandardEmitter();
         shadowGraph = new ShadowGraph(mock(SyncMetrics.class));
 
@@ -773,7 +781,9 @@ void testInitFromEvents_EventListDifferentMinGen() {
     @Test
     void testInitFromEvents_AddEventThrows() {
         Random random = RandomUtils.getRandomPrintSeed();
-        EventEmitterFactory factory = new EventEmitterFactory(random, 4);
+        final AddressBook addressBook =
+                new RandomAddressBookGenerator(random).setSize(4).build();
+        final EventEmitterFactory factory = new EventEmitterFactory(random, addressBook);
         emitter = factory.newStandardEmitter();
         shadowGraph = new ShadowGraph(mock(SyncMetrics.class));
 
@@ -800,7 +810,9 @@ void findAncestorsPerformance() throws ShadowGraphInsertionException {
         final int numRuns = 10;
 
         final Random random = RandomUtils.getRandomPrintSeed();
-        EventEmitterFactory factory = new EventEmitterFactory(random, numNodes);
+        final AddressBook addressBook =
+                new RandomAddressBookGenerator(random).setSize(numNodes).build();
+        final EventEmitterFactory factory = new EventEmitterFactory(random, addressBook);
         emitter = factory.newStandardEmitter();
         shadowGraph = new ShadowGraph(mock(SyncMetrics.class));
         for (int i = 0; i < numEvents; i++) {
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTestExecutor.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTestExecutor.java
index c7117eb5fc32..7328380eb889 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTestExecutor.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTestExecutor.java
@@ -19,6 +19,8 @@
 import static com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager;
 import static org.mockito.Mockito.when;
 
+import com.swirlds.common.system.address.AddressBook;
+import com.swirlds.common.test.RandomAddressBookGenerator;
 import com.swirlds.common.test.RandomUtils;
 import com.swirlds.common.threading.pool.CachedPoolParallelExecutor;
 import com.swirlds.common.threading.pool.ParallelExecutor;
@@ -28,6 +30,7 @@
 import com.swirlds.platform.test.event.emitter.EventEmitter;
 import com.swirlds.platform.test.event.emitter.EventEmitterFactory;
 import com.swirlds.platform.test.event.emitter.ShuffledEventEmitter;
+import edu.umd.cs.findbugs.annotations.NonNull;
 import java.io.IOException;
 import java.time.Duration;
 import java.util.List;
@@ -63,9 +66,14 @@ public class SyncTestExecutor {
     private BiConsumer generationDefinitions;
     private Predicate callerAddToGraphTest;
     private Predicate listenerAddToGraphTest;
+    /** A randomly generated address book from the number of nodes in the parameters of the test. */
+    private AddressBook addressBook;
 
     public SyncTestExecutor(final SyncTestParams params) {
         this.params = params;
+        this.addressBook = new RandomAddressBookGenerator()
+                .setSize(params.getNumNetworkNodes())
+                .build();
 
         factoryConfig = (f) -> {};
 
         callerExecutorSupplier = () -> {
@@ -104,6 +112,16 @@ public SyncTestExecutor(final SyncTestParams params) {
         connectionFactory = ConnectionFactory::createLocalConnections;
     }
 
+    /**
+     * Returns the address book.
+     *
+     * @return the address book
+     */
+    @NonNull
+    public AddressBook getAddressBook() {
+        return addressBook;
+    }
+
     /**
     * Executes the following test phases in order:
     *
@@ -134,7 +152,8 @@ private void initialize() throws IOException {
             System.out.println("Using custom seed: " + params.getCustomSeed());
             random = new Random(params.getCustomSeed());
         }
-        final EventEmitterFactory factory = new EventEmitterFactory(random, params.getNumNetworkNodes());
+
+        final EventEmitterFactory factory = new EventEmitterFactory(random, addressBook);
 
         factoryConfig.accept(factory);
diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTests.java
index 237a59a3f2b2..4c86a7d73323 100644
--- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTests.java
+++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/sync/SyncTests.java
@@ -28,6 +28,7 @@
 
 import com.swirlds.common.constructable.ConstructableRegistry;
 import com.swirlds.common.constructable.ConstructableRegistryException;
+import com.swirlds.common.system.NodeId;
 import com.swirlds.common.test.threading.ReplaceSyncPhaseParallelExecutor;
 import com.swirlds.common.test.threading.SyncPhaseParallelExecutor;
 import com.swirlds.common.threading.pool.CachedPoolParallelExecutor;
@@ -684,13 +685,15 @@ void testSendExpiredEvents() throws Exception {
     @ParameterizedTest
     @MethodSource({"tenNodeGraphParams", "tenNodeBigGraphParams", "tipExpiresBreakingSeed"})
     void tipExpiresAfterPhase1(final SyncTestParams params) throws Exception {
-        final long creatorIdToExpire = 0;
         final SyncTestExecutor executor = new SyncTestExecutor(params);
         final AtomicLong maxGen = new AtomicLong(EventConstants.GENERATION_UNDEFINED);
 
+        final int creatorIndexToExpire = 0;
+        final NodeId creatorIdToExpire = executor.getAddressBook().getNodeId(creatorIndexToExpire);
+
         // node 0 should not create any events after CommonEvents
-        executor.setFactoryConfig(
-                (factory) -> factory.getSourceFactory().addCustomSource((index) -> index == creatorIdToExpire, () -> {
+        executor.setFactoryConfig((factory) -> factory.getSourceFactory()
+                .addCustomSource((index) -> index == creatorIndexToExpire, () -> {
                     final StandardEventSource source0 = new StandardEventSource(false);
                     source0.setNewEventWeight((r, index, prev) -> {
                         if (index <= params.getNumCommonEvents() / 2) {
@@ -824,10 +827,12 @@ void requiredEventsExpire(final int expireAfterPhase, final SyncTestParams param
         final SyncTestExecutor executor = new SyncTestExecutor(params);
         final AtomicLong genToExpire = new AtomicLong(0);
 
+        final NodeId creatorId = executor.getAddressBook().getNodeId(0);
+
         // Set the generation to expire such that half the listener's graph, and therefore some events that need
         // to be sent to the caller, will be expired
         executor.setCustomPreSyncConfiguration(
-                (c, l) -> genToExpire.set(l.getEmitter().getGraphGenerator().getMaxGeneration(0) / 2));
+                (c, l) -> genToExpire.set(l.getEmitter().getGraphGenerator().getMaxGeneration(creatorId) / 2));
 
         // Expire events from the listener's graph after the supplied phase
         final Runnable expireEvents =
diff --git a/settings.gradle.kts b/settings.gradle.kts
index 07a6c0bce47d..0a58cc755bb8 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -103,7 +103,7 @@ gradleEnterprise {
 // The HAPI API version to use for Protobuf sources. This can be a tag or branch
 // name from the hedera-protobufs GIT repo.
 val hapiProtoVersion = "0.40.0-blocks-state-SNAPSHOT"
-val hapiProtoBranchOrTag = "change-numbers-to-ids-nft-proto" // hapiProtoVersion
+val hapiProtoBranchOrTag = "add-pbj-types-for-state" // hapiProtoVersion
 
 gitRepositories {
     checkoutsDirectory.set(File(rootDir, "hedera-node/hapi"))