From 02f6067bf10844129b528b66b3c827689987075c Mon Sep 17 00:00:00 2001 From: chad Date: Fri, 13 Sep 2024 16:10:59 -0500 Subject: [PATCH 01/21] chore: added devnet config --- .github/workflows/bench-testnet.yaml | 32 +++ .github/workflows/bench.yml | 4 +- internal/benchmarks/package.json | 3 +- internal/benchmarks/src/config.ts | 11 + .../src/contract-interaction.bench.ts | 39 ++-- .../benchmarks/src/cost-estimation.bench.ts | 218 ++++++++++-------- internal/benchmarks/src/crypto.bench.ts | 61 ----- .../src/transaction-results.bench.ts | 81 ++++--- internal/benchmarks/src/wallet.bench.ts | 57 +++-- 9 files changed, 292 insertions(+), 214 deletions(-) create mode 100644 .github/workflows/bench-testnet.yaml create mode 100644 internal/benchmarks/src/config.ts delete mode 100644 internal/benchmarks/src/crypto.bench.ts diff --git a/.github/workflows/bench-testnet.yaml b/.github/workflows/bench-testnet.yaml new file mode 100644 index 0000000000..c035e382b4 --- /dev/null +++ b/.github/workflows/bench-testnet.yaml @@ -0,0 +1,32 @@ +name: "Bench Devnet" + +on: + # Remove this before merging, we only want devnet tests on master + pull_request: + branches: + - master + push: + branches: + - master + - release/* + +jobs: + benchmarks: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: CI Setup + uses: ./.github/actions/test-setup + + - name: Pretest + run: pnpm pretest + + - name: Run Node benchmarks + uses: CodSpeedHQ/action@v3 + with: + run: pnpm bench:node + token: ${{ secrets.CODSPEED_TOKEN }} + env: + DEVNET_WALLET_PVT_KEY: ${{ secrets.DEVNET_WALLET_PVT_KEY }} diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 2843553ba6..96f6a93255 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -1,9 +1,11 @@ name: Benchmarks on: pull_request: - push: branches: - master + push: + branches-ignore: + - master jobs: benchmarks: diff --git a/internal/benchmarks/package.json b/internal/benchmarks/package.json index 284e486449..9455f65991 100644 --- a/internal/benchmarks/package.json +++ b/internal/benchmarks/package.json @@ -11,7 +11,8 @@ }, "license": "Apache-2.0", "dependencies": { - "fuels": "workspace:*" + "fuels": "workspace:*", + "@internal/utils": "workspace:*" }, "version": "1.0.0" } diff --git a/internal/benchmarks/src/config.ts b/internal/benchmarks/src/config.ts new file mode 100644 index 0000000000..d8c15e599f --- /dev/null +++ b/internal/benchmarks/src/config.ts @@ -0,0 +1,11 @@ +import { DEVNET_NETWORK_URL } from '@internal/utils'; +import { TransactionType } from 'fuels'; + +export const DEVNET_CONFIG = { + networkUrl: DEVNET_NETWORK_URL, + faucetUrl: `https://faucet-devnet.fuel.network/`, + txIds: { + [TransactionType.Upgrade]: '0xe2c03044fe708e9b112027881baf9f892e6b64a630a629998922c1cab918c094', + [TransactionType.Upload]: '0x94bc2a189b8211796c8fe5b9c6b67624fe97d2007e104bf1b30739944f43bd73', + }, +}; diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index 1d35bb3b1e..774b77b9d1 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -1,12 +1,13 @@ /* eslint-disable import/no-extraneous-dependencies */ -import type { WalletUnlocked } from 'fuels'; -import { bn } from 'fuels'; +import { WalletUnlocked, bn, Provider } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; import type { CounterContract, 
CallTestContract } from '../test/typegen/contracts'; import { CounterContractFactory, CallTestContractFactory } from '../test/typegen/contracts'; + +import { DEVNET_CONFIG } from './config'; /** * @group node * @group browser @@ -16,20 +17,32 @@ describe('Contract Interaction Benchmarks', () => { let callTestContract: CallTestContract; let wallet: WalletUnlocked; let cleanup: () => void; - beforeEach(async () => { - const launched = await launchTestNode({ - contractsConfigs: [{ factory: CounterContractFactory }, { factory: CallTestContractFactory }], + + if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { + beforeAll(async () => { + const { networkUrl } = DEVNET_CONFIG; + const provider = await Provider.create(networkUrl); + wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); }); + } else { + beforeEach(async () => { + const launched = await launchTestNode({ + contractsConfigs: [ + { factory: CounterContractFactory }, + { factory: CallTestContractFactory }, + ], + }); - cleanup = launched.cleanup; - contract = launched.contracts[0]; - callTestContract = launched.contracts[1]; - wallet = launched.wallets[0]; - }); + cleanup = launched.cleanup; + contract = launched.contracts[0]; + callTestContract = launched.contracts[1]; + wallet = launched.wallets[0]; + }); - afterEach(() => { - cleanup(); - }); + afterEach(() => { + cleanup(); + }); + } bench('should successfully execute a contract read function', async () => { const tx = await contract.functions.get_count().call(); diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index 1ba18bcb3d..b34b63db5e 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -1,13 +1,15 @@ /* eslint-disable import/no-extraneous-dependencies */ -import type { TransferParams, Provider } from 'fuels'; -import { ScriptTransactionRequest, Wallet } from 'fuels'; +import type { TransferParams, WalletUnlocked } from 'fuels'; +import { Wallet, Provider, ScriptTransactionRequest } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; import type { CallTestContract } from '../test/typegen/contracts'; import { CallTestContractFactory } from '../test/typegen/contracts'; +import { DEVNET_CONFIG } from './config'; + /** * @group node * @group browser @@ -15,30 +17,93 @@ import { CallTestContractFactory } from '../test/typegen/contracts'; describe('Cost Estimation Benchmarks', () => { let contract: CallTestContract; let provider: Provider; + let request: ScriptTransactionRequest; + let recipient: WalletUnlocked; + let receiver1: WalletUnlocked; + let receiver2: WalletUnlocked; + let receiver3: WalletUnlocked; + let sender: WalletUnlocked; + let cleanup: () => void; - beforeEach(async () => { - const launched = await launchTestNode({ - contractsConfigs: [{ factory: CallTestContractFactory }], + + const setup = (testProvider: Provider) => { + request = new ScriptTransactionRequest({ gasLimit: 1000000 }); + + recipient = Wallet.generate({ + provider: testProvider, + }); + receiver1 = Wallet.generate({ + provider: testProvider, + }); + receiver2 = Wallet.generate({ + provider: testProvider, + }); + receiver3 = Wallet.generate({ + provider: testProvider, }); + sender = Wallet.fromPrivateKey( + '0x30bb0bc68f5d2ec3b523cee5a65503031b40679d9c72280cd8088c2cfbc34e38', + testProvider + ); + }; - cleanup = launched.cleanup; - contract = launched.contracts[0]; - provider = 
contract.provider; - }); + if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { + beforeAll(async () => { + const { networkUrl } = DEVNET_CONFIG; + provider = await Provider.create(networkUrl); - afterEach(() => { - cleanup(); - }); + setup(provider); + }); + } else { + beforeEach(async () => { + const launched = await launchTestNode({ + contractsConfigs: [{ factory: CallTestContractFactory }], + }); + + cleanup = launched.cleanup; + contract = launched.contracts[0]; + provider = contract.provider; + + setup(provider); + }); + + afterEach(() => { + cleanup(); + }); + } bench( - 'should successfully get transaction cost estimate for a single contract call', + 'should successfully get transaction cost estimate for a single contract call done 10 times', async () => { - const cost = await contract.functions - .return_context_amount() - .callParams({ + for (let i = 0; i < 10; i++) { + const cost = await contract.functions + .return_context_amount() + .callParams({ + forward: [100, contract.provider.getBaseAssetId()], + }) + .getTransactionCost(); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } + } + ); + + bench('should successfully get transaction cost estimate for multi contract calls', async () => { + for (let i = 0; i < 10; i++) { + const invocationScope = contract.multiCall([ + contract.functions.return_context_amount().callParams({ forward: [100, contract.provider.getBaseAssetId()], - }) - .getTransactionCost(); + }), + contract.functions.return_context_amount().callParams({ + forward: [200, TestAssetId.A.value], + }), + ]); + + const cost = await invocationScope.getTransactionCost(); expect(cost.minFee).toBeDefined(); expect(cost.maxFee).toBeDefined(); @@ -46,89 +111,62 @@ describe('Cost Estimation Benchmarks', () => { expect(cost.gasUsed).toBeDefined(); expect(cost.gasPrice).toBeDefined(); } - ); - - bench('should successfully get transaction cost estimate for multi contract calls', async () => { - const invocationScope = contract.multiCall([ - contract.functions.return_context_amount().callParams({ - forward: [100, contract.provider.getBaseAssetId()], - }), - contract.functions.return_context_amount().callParams({ - forward: [200, TestAssetId.A.value], - }), - ]); - - const cost = await invocationScope.getTransactionCost(); - - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); }); bench('should successfully get transaction cost estimate for a single transfer', async () => { - const request = new ScriptTransactionRequest({ gasLimit: 1000000 }); + for (let i = 0; i < 10; i++) { + request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); - const recipient = Wallet.generate({ - provider, - }); - const sender = Wallet.fromPrivateKey( - '0x30bb0bc68f5d2ec3b523cee5a65503031b40679d9c72280cd8088c2cfbc34e38', - provider - ); - - request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); + const cost = await sender.getTransactionCost(request); - const cost = await sender.getTransactionCost(request); - - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + 
expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } }); bench('should successfully get transaction cost estimate for a batch transfer', async () => { - const receiver1 = Wallet.generate({ provider }); - const receiver2 = Wallet.generate({ provider }); - const receiver3 = Wallet.generate({ provider }); - - const amountToTransfer1 = 989; - const amountToTransfer2 = 699; - const amountToTransfer3 = 122; - - const transferParams: TransferParams[] = [ - { - destination: receiver1.address, - amount: amountToTransfer1, - assetId: provider.getBaseAssetId(), - }, - { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, - { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, - ]; - - const cost = await contract.functions - .sum(40, 50) - .addBatchTransfer(transferParams) - .getTransactionCost(); - - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); + for (let i = 0; i < 10; i++) { + const amountToTransfer1 = 989; + const amountToTransfer2 = 699; + const amountToTransfer3 = 122; + + const transferParams: TransferParams[] = [ + { + destination: receiver1.address, + amount: amountToTransfer1, + assetId: provider.getBaseAssetId(), + }, + { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, + { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, + ]; + + const cost = await contract.functions + .sum(40, 50) + .addBatchTransfer(transferParams) + .getTransactionCost(); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } }); - it('should successfully get transaction cost estimate for a mint', async () => { - const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; + it('should successfully get transaction cost estimate for a mint 10 times', async () => { + for (let i = 0; i < 10; i++) { + const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; - const cost = await contract.functions.mint_coins(subId, 1_000).getTransactionCost(); + const cost = await contract.functions.mint_coins(subId, 1_000).getTransactionCost(); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } }); }); diff --git a/internal/benchmarks/src/crypto.bench.ts b/internal/benchmarks/src/crypto.bench.ts deleted file mode 100644 index b65db42a17..0000000000 --- a/internal/benchmarks/src/crypto.bench.ts +++ /dev/null @@ -1,61 +0,0 @@ -/* eslint-disable import/no-extraneous-dependencies */ -import type { Keystore } from 'fuels'; -import { bufferFromString, pbkdf2, computeHmac, encrypt, decrypt } from 'fuels'; -import { bench } from 'vitest'; - -/** - * @group node - * @group browser - */ -describe('crypto bench', () => { - bench( - 'should correctly convert string to Uint8Array with base64 encoding in a node environment', - () => { - const string = 'aGVsbG8='; // "hello" in 
Base64 - bufferFromString(string, 'base64'); - } - ); - - bench('should compute the PBKDF2 hash', () => { - const passwordBuffer = bufferFromString(String('password123').normalize('NFKC'), 'utf-8'); - const saltBuffer = bufferFromString(String('salt456').normalize('NFKC'), 'utf-8'); - const iterations = 1000; - const keylen = 32; - const algo = 'sha256'; - - pbkdf2(passwordBuffer, saltBuffer, iterations, keylen, algo); - }); - - bench('should compute HMAC correctly', () => { - const key = '0x0102030405060708090a0b0c0d0e0f10'; - const data = '0x11121314151617181920212223242526'; - const sha256Length = 64; - const sha512Length = 128; - const prefix = '0x'; - - expect(computeHmac('sha256', key, data).length).toBe(sha256Length + prefix.length); - expect(computeHmac('sha512', key, data).length).toBe(sha512Length + prefix.length); - }); - - bench('Encrypt via aes-ctr', async () => { - const password = '0b540281-f87b-49ca-be37-2264c7f260f7'; - const data = { - name: 'test', - }; - - const encryptedResult = await encrypt(password, data); - expect(encryptedResult.data).toBeTruthy(); - expect(encryptedResult.iv).toBeTruthy(); - expect(encryptedResult.salt).toBeTruthy(); - }); - - bench('Decrypt via aes-ctr', async () => { - const password = '0b540281-f87b-49ca-be37-2264c7f260f7'; - const encryptedResult: Keystore = { - data: 'vj1/JyHR+NiIaWXTpl5T', - iv: '0/lqnRVK5HE/5b1cQAHfqg==', - salt: 'nHdHXW2EmOEagAH2UUDYMRNhd7LJ5XLIcZoVQZMPSlU=', - }; - await decrypt(password, encryptedResult); - }); -}); diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index d35f5fc323..3a5d22d358 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -1,10 +1,12 @@ /* eslint-disable import/no-extraneous-dependencies */ -import { Wallet } from 'fuels'; -import type { WalletUnlocked, TransferParams, Provider } from 'fuels'; +import { Wallet, Provider } from 'fuels'; +import type { WalletUnlocked, TransferParams } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; +import { DEVNET_CONFIG } from './config'; + /** * @group node * @group browser @@ -12,33 +14,46 @@ import { bench } from 'vitest'; describe('Transaction Submission Benchmarks', () => { let provider: Provider; let wallet: WalletUnlocked; - let walletA: WalletUnlocked; + let receiver1: WalletUnlocked; + let receiver2: WalletUnlocked; + let receiver3: WalletUnlocked; let cleanup: () => void; - beforeEach(async () => { - const launched = await launchTestNode(); - - cleanup = launched.cleanup; - provider = launched.provider; - walletA = launched.wallets[0]; - wallet = launched.wallets[1]; - }); - afterEach(() => { - cleanup(); - }); - bench('should successfully transfer a single asset between wallets', async () => { - const receiver = Wallet.generate({ provider }); - - const tx = await walletA.transfer(receiver.address, 100, provider.getBaseAssetId()); + if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { + beforeAll(async () => { + const { networkUrl } = DEVNET_CONFIG; + provider = await Provider.create(networkUrl); + wallet = Wallet.fromPrivateKey(process.env.DEVNET_WALLET_PVT_KEY as string, provider); + receiver1 = Wallet.generate({ provider }); + receiver2 = Wallet.generate({ provider }); + receiver3 = Wallet.generate({ provider }); + }); + } else { + beforeEach(async () => { + const launched = await launchTestNode(); + + cleanup = launched.cleanup; + provider = 
launched.provider; + wallet = launched.wallets[1]; + receiver1 = Wallet.generate({ provider }); + receiver2 = Wallet.generate({ provider }); + receiver3 = Wallet.generate({ provider }); + }); + + afterEach(() => { + cleanup(); + }); + } + + const transfer = async () => { + const tx = await wallet.transfer(receiver1.address, 100, provider.getBaseAssetId()); const { isStatusSuccess } = await tx.waitForResult(); expect(isStatusSuccess).toBeTruthy(); - }); - - bench('should successfully conduct a custom transfer between wallets', async () => { - const receiver = Wallet.generate({ provider }); + }; + const customTransfer = async () => { const txParams = { tip: 4, witnessLimit: 800, @@ -46,7 +61,7 @@ describe('Transaction Submission Benchmarks', () => { }; const pendingTx = await wallet.transfer( - receiver.address, + receiver1.address, 500, provider.getBaseAssetId(), txParams @@ -55,13 +70,21 @@ describe('Transaction Submission Benchmarks', () => { const { transaction } = await pendingTx.waitForResult(); expect(transaction).toBeDefined(); + }; + + bench('should successfully transfer a single asset between wallets 10 times', async () => { + for (let i = 0; i < 10; i++) { + await transfer(); + } }); - bench('should successfully perform a batch transfer', async () => { - const receiver1 = Wallet.generate({ provider }); - const receiver2 = Wallet.generate({ provider }); - const receiver3 = Wallet.generate({ provider }); + bench('should successfully conduct a custom transfer between wallets 10 times', async () => { + for (let i = 0; i < 10; i++) { + await customTransfer(); + } + }); + bench('should successfully perform a batch transfer', async () => { const amountToTransfer1 = 989; const amountToTransfer2 = 699; const amountToTransfer3 = 122; @@ -84,14 +107,12 @@ describe('Transaction Submission Benchmarks', () => { }); bench('should successfully withdraw to the base layer', async () => { - const receiver = Wallet.generate({ provider }); - const txParams = { witnessLimit: 800, maxFee: 100_000, }; - const pendingTx = await wallet.withdrawToBaseLayer(receiver.address, 500, txParams); + const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); const { transaction } = await pendingTx.waitForResult(); expect(transaction).toBeDefined(); diff --git a/internal/benchmarks/src/wallet.bench.ts b/internal/benchmarks/src/wallet.bench.ts index ff16cc2b1b..553b2f60be 100644 --- a/internal/benchmarks/src/wallet.bench.ts +++ b/internal/benchmarks/src/wallet.bench.ts @@ -1,9 +1,11 @@ /* eslint-disable import/no-extraneous-dependencies */ -import { WalletLocked, WalletUnlocked, Wallet } from 'fuels'; +import { Provider, WalletLocked, WalletUnlocked, Wallet } from 'fuels'; import { launchTestNode } from 'fuels/test-utils'; import { bench } from 'vitest'; +import { DEVNET_CONFIG } from './config'; + const expectedPrivateKey = '0x5f70feeff1f229e4a95e1056e8b4d80d0b24b565674860cc213bdb07127ce1b1'; const expectedPublicKey = '0x2f34bc0df4db0ec391792cedb05768832b49b1aa3a2dd8c30054d1af00f67d00b74b7acbbf3087c8e0b1a4c343db50aa471d21f278ff5ce09f07795d541fb47e'; @@ -15,30 +17,49 @@ const expectedLockedAddress = 'fuel1tac0aml37g57f227zptw3dxcp59jfdt9vayxpnpp80ds * @group browser */ describe('Wallet Benchmarks', () => { - bench('Instantiate a new Unlocked wallet', async () => { - using launched = await launchTestNode(); - const { provider } = launched; + let cleanup: () => void; + let provider: Provider; + + if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { + beforeAll(async () => { + provider = 
await Provider.create(DEVNET_CONFIG.networkUrl); + }); + } else { + beforeEach(async () => { + const launched = await launchTestNode(); + + cleanup = launched.cleanup; + provider = launched.provider; + }); + + afterEach(() => { + cleanup(); + }); + } - const unlockedWallet = new WalletUnlocked(expectedPrivateKey, provider); + bench('Instantiate a new Unlocked wallet', () => { + for (let i = 0; i < 10; i++) { + const unlockedWallet = new WalletUnlocked(expectedPrivateKey, provider); - expect(unlockedWallet.publicKey).toEqual(expectedPublicKey); - expect(unlockedWallet.address.toAddress()).toEqual(expectedAddress); + expect(unlockedWallet.publicKey).toEqual(expectedPublicKey); + expect(unlockedWallet.address.toAddress()).toEqual(expectedAddress); + } }); - bench('Instantiate from a constructor', async () => { - using launched = await launchTestNode(); - const { provider } = launched; - const lockedWallet = new WalletLocked(expectedPrivateKey, provider); + bench('Instantiate a new Locked wallet from a constructor 10 times', () => { + for (let i = 0; i < 10; i++) { + const lockedWallet = new WalletLocked(expectedPrivateKey, provider); - expect(lockedWallet.address.toAddress()).toEqual(expectedLockedAddress); + expect(lockedWallet.address.toAddress()).toEqual(expectedLockedAddress); + } }); - bench('Instantiate from an address', async () => { - using launched = await launchTestNode(); - const { provider } = launched; - const lockedWallet = Wallet.fromAddress(expectedAddress, provider); + bench('Instantiate from an address', () => { + for (let i = 0; i < 10; i++) { + const lockedWallet = Wallet.fromAddress(expectedAddress, provider); - expect(lockedWallet.address.toAddress()).toEqual(expectedAddress); - expect(lockedWallet).toBeInstanceOf(WalletLocked); + expect(lockedWallet.address.toAddress()).toEqual(expectedAddress); + expect(lockedWallet).toBeInstanceOf(WalletLocked); + } }); }); From 3f53e841b9b6dae82d264db6cfd58d91d2e2da9c Mon Sep 17 00:00:00 2001 From: chad Date: Fri, 13 Sep 2024 16:13:19 -0500 Subject: [PATCH 02/21] chore: updated lockfile --- pnpm-lock.yaml | 306 ++++++++++++++++++++++++++----------------------- 1 file changed, 160 insertions(+), 146 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index bd9bd6d3ac..d180976b53 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -163,7 +163,7 @@ importers: version: 0.1.1 tsup: specifier: ^6.7.0 - version: 6.7.0(@swc/core@1.7.14)(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(typescript@5.6.2) + version: 6.7.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(typescript@5.6.2) tsx: specifier: ^4.19.0 version: 4.19.0 @@ -272,7 +272,7 @@ importers: version: 8.4.45 tailwindcss: specifier: ^3.4.10 - version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)) + version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)) typescript: specifier: ~5.6.2 version: 5.6.2 @@ -369,7 +369,7 @@ importers: version: 18.3.0 eslint-config-react-app: specifier: ^7.0.1 - version: 7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2) + version: 
7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2) fuels: specifier: workspace:* version: link:../../packages/fuels @@ -381,7 +381,7 @@ importers: version: 18.3.1(react@18.3.1) react-scripts: specifier: 5.0.1 - version: 5.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(@swc/core@1.7.14)(@types/babel__core@7.20.5)(bufferutil@4.0.8)(esbuild@0.17.19)(eslint@9.10.0(jiti@1.21.6))(react@18.3.1)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(type-fest@3.1.0)(typescript@5.6.2)(utf-8-validate@5.0.10) + version: 5.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/babel__core@7.20.5)(bufferutil@4.0.8)(esbuild@0.17.19)(eslint@9.10.0(jiti@1.21.6))(react@18.3.1)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(type-fest@3.1.0)(typescript@5.6.2)(utf-8-validate@5.0.10) typescript: specifier: ~5.6.2 version: 5.6.2 @@ -502,7 +502,7 @@ importers: version: 6.0.1(jiti@1.21.6)(postcss@8.4.45)(tsx@4.19.0)(yaml@2.5.1) tailwindcss: specifier: ^3.4.10 - version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)) + version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)) typescript: specifier: ~5.6.2 version: 5.6.2 @@ -579,6 +579,9 @@ importers: internal/benchmarks: dependencies: + '@internal/utils': + specifier: workspace:* + version: link:../utils fuels: specifier: workspace:* version: link:../../packages/fuels @@ -1312,7 +1315,7 @@ importers: version: 8.4.45 tailwindcss: specifier: ^3.4.10 - version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)) + version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)) typescript: specifier: ~5.6.2 version: 5.6.2 @@ -1397,7 +1400,7 @@ importers: version: 8.4.45 tailwindcss: specifier: ^3.4.10 - version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)) + version: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)) typescript: specifier: ~5.6.2 version: 5.6.2 @@ -19234,7 +19237,7 @@ snapshots: jest-util: 28.1.3 slash: 3.0.0 - '@jest/core@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10)': + '@jest/core@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10)': dependencies: '@jest/console': 27.5.1 '@jest/reporters': 27.5.1 @@ -19248,7 +19251,7 @@ snapshots: exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 27.5.1 - jest-config: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + jest-config: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) jest-haste-map: 27.5.1 jest-message-util: 27.5.1 jest-regex-util: 27.5.1 @@ -20097,7 +20100,7 @@ snapshots: dependencies: playwright: 1.47.0 - 
'@pmmmwh/react-refresh-webpack-plugin@0.5.10(react-refresh@0.11.0)(type-fest@3.1.0)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)))(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19))': + '@pmmmwh/react-refresh-webpack-plugin@0.5.10(react-refresh@0.11.0)(type-fest@3.1.0)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)))(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19))': dependencies: ansi-html-community: 0.0.8 common-path-prefix: 3.0.0 @@ -20109,10 +20112,10 @@ snapshots: react-refresh: 0.11.0 schema-utils: 3.3.0 source-map: 0.7.4 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) optionalDependencies: type-fest: 3.1.0 - webpack-dev-server: 4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + webpack-dev-server: 4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) '@polka/url@1.0.0-next.25': {} @@ -22057,7 +22060,7 @@ snapshots: vite: 5.4.3(@types/node@22.5.4)(terser@5.31.6) vue: 3.5.3(typescript@5.6.2) - '@vitest/browser@2.0.5(bufferutil@4.0.8)(playwright@1.47.0)(typescript@5.6.2)(utf-8-validate@5.0.10)(vitest@2.0.5)(webdriverio@9.0.9(bufferutil@4.0.8)(utf-8-validate@6.0.4))': + '@vitest/browser@2.0.5(bufferutil@4.0.8)(playwright@1.47.0)(typescript@5.6.2)(utf-8-validate@5.0.10)(vitest@2.0.5)(webdriverio@9.0.9(bufferutil@4.0.8)(utf-8-validate@5.0.10))': dependencies: '@testing-library/dom': 10.4.0 '@testing-library/user-event': 14.5.2(@testing-library/dom@10.4.0) @@ -22069,7 +22072,7 @@ snapshots: ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@5.0.10) optionalDependencies: playwright: 1.47.0 - webdriverio: 9.0.9(bufferutil@4.0.8)(utf-8-validate@6.0.4) + webdriverio: 9.0.9(bufferutil@4.0.8)(utf-8-validate@5.0.10) transitivePeerDependencies: - bufferutil - typescript @@ -24092,14 +24095,14 @@ snapshots: transitivePeerDependencies: - supports-color - babel-loader@8.3.0(@babel/core@7.22.5)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + babel-loader@8.3.0(@babel/core@7.22.5)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: '@babel/core': 7.22.5 find-cache-dir: 3.3.2 loader-utils: 2.0.4 make-dir: 3.1.0 schema-utils: 2.7.1 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) babel-plugin-istanbul@6.1.1: dependencies: @@ -25381,7 +25384,7 @@ snapshots: dependencies: hyphenate-style-name: 1.0.4 - css-loader@6.8.1(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + css-loader@6.8.1(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: icss-utils: 5.1.0(postcss@8.4.45) postcss: 8.4.45 @@ -25391,9 +25394,9 @@ snapshots: postcss-modules-values: 4.0.0(postcss@8.4.45) postcss-value-parser: 4.2.0 semver: 7.3.8 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) - css-minimizer-webpack-plugin@3.4.1(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + css-minimizer-webpack-plugin@3.4.1(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: cssnano: 5.1.15(postcss@8.4.45) jest-worker: 27.5.1 @@ -25401,7 +25404,7 @@ snapshots: schema-utils: 4.2.0 
serialize-javascript: 6.0.1 source-map: 0.6.1 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) optionalDependencies: esbuild: 0.17.19 @@ -26454,7 +26457,7 @@ snapshots: eslint: 8.57.0 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0) - eslint-plugin-import: 2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) + eslint-plugin-import: 2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) eslint-plugin-jsx-a11y: 6.10.0(eslint@8.57.0) eslint-plugin-react: 7.35.0(eslint@8.57.0) eslint-plugin-react-hooks: 4.6.2(eslint@8.57.0) @@ -26468,7 +26471,7 @@ snapshots: dependencies: eslint: 8.57.0 - eslint-config-react-app@7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2): + eslint-config-react-app@7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2): dependencies: '@babel/core': 7.22.5 '@babel/eslint-parser': 7.22.5(@babel/core@7.22.5)(eslint@9.10.0(jiti@1.21.6)) @@ -26480,7 +26483,7 @@ snapshots: eslint: 9.10.0(jiti@1.21.6) eslint-plugin-flowtype: 8.0.3(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(eslint@9.10.0(jiti@1.21.6)) eslint-plugin-import: 2.30.0(@typescript-eslint/parser@5.62.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6)) - eslint-plugin-jest: 25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2) + eslint-plugin-jest: 25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2) eslint-plugin-jsx-a11y: 6.10.0(eslint@9.10.0(jiti@1.21.6)) eslint-plugin-react: 7.35.0(eslint@9.10.0(jiti@1.21.6)) eslint-plugin-react-hooks: 4.6.2(eslint@9.10.0(jiti@1.21.6)) @@ -26509,24 +26512,7 @@ snapshots: enhanced-resolve: 5.17.1 eslint: 8.57.0 eslint-module-utils: 
2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) - eslint-plugin-import: 2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) - fast-glob: 3.3.2 - get-tsconfig: 4.8.0 - is-core-module: 2.15.1 - is-glob: 4.0.3 - transitivePeerDependencies: - - '@typescript-eslint/parser' - - eslint-import-resolver-node - - eslint-import-resolver-webpack - - supports-color - - eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0)(eslint@8.57.0): - dependencies: - debug: 4.3.7 - enhanced-resolve: 5.17.1 - eslint: 8.57.0 - eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0)(eslint@8.57.0))(eslint@8.57.0) - eslint-plugin-import: 2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) + eslint-plugin-import: 2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) fast-glob: 3.3.2 get-tsconfig: 4.8.0 is-core-module: 2.15.1 @@ -26536,7 +26522,6 @@ snapshots: - eslint-import-resolver-node - eslint-import-resolver-webpack - supports-color - optional: true eslint-module-utils@2.11.0(@typescript-eslint/parser@5.62.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint@9.10.0(jiti@1.21.6)): dependencies: @@ -26559,17 +26544,6 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-module-utils@2.11.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0)(eslint@8.57.0))(eslint@8.57.0): - dependencies: - debug: 3.2.7 - optionalDependencies: - '@typescript-eslint/parser': 6.21.0(eslint@8.57.0)(typescript@5.6.2) - eslint: 8.57.0 - eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0)(eslint@8.57.0) - transitivePeerDependencies: - - supports-color - eslint-module-utils@2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0): dependencies: 
debug: 3.2.7 @@ -26581,18 +26555,6 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-module-utils@2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0)(eslint@8.57.0))(eslint@8.57.0): - dependencies: - debug: 3.2.7 - optionalDependencies: - '@typescript-eslint/parser': 6.21.0(eslint@8.57.0)(typescript@5.6.2) - eslint: 8.57.0 - eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0)(eslint@8.57.0) - transitivePeerDependencies: - - supports-color - optional: true - eslint-plugin-eslint-comments@3.2.0(eslint@8.57.0): dependencies: escape-string-regexp: 1.0.5 @@ -26635,7 +26597,7 @@ snapshots: - eslint-import-resolver-webpack - supports-color - eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0): + eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.8 @@ -26674,7 +26636,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.57.0 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.11.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0)(eslint@8.57.0))(eslint@8.57.0) + eslint-module-utils: 2.11.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) hasown: 2.0.2 is-core-module: 2.15.1 is-glob: 4.0.3 @@ -26691,13 +26653,13 @@ snapshots: - eslint-import-resolver-webpack - supports-color - eslint-plugin-jest@25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2): + eslint-plugin-jest@25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2): dependencies: '@typescript-eslint/experimental-utils': 5.60.1(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2) eslint: 9.10.0(jiti@1.21.6) optionalDependencies: 
'@typescript-eslint/eslint-plugin': 5.62.0(@typescript-eslint/parser@5.62.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2))(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2) - jest: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + jest: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) transitivePeerDependencies: - supports-color - typescript @@ -26857,7 +26819,7 @@ snapshots: eslint-visitor-keys@4.0.0: {} - eslint-webpack-plugin@3.2.0(eslint@9.10.0(jiti@1.21.6))(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + eslint-webpack-plugin@3.2.0(eslint@9.10.0(jiti@1.21.6))(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: '@types/eslint': 8.40.2 eslint: 9.10.0(jiti@1.21.6) @@ -26865,7 +26827,7 @@ snapshots: micromatch: 4.0.7 normalize-path: 3.0.0 schema-utils: 4.2.0 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) eslint@8.57.0: dependencies: @@ -27322,11 +27284,11 @@ snapshots: dependencies: flat-cache: 4.0.1 - file-loader@6.2.0(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + file-loader@6.2.0(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: loader-utils: 2.0.4 schema-utils: 3.3.0 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) file-uri-to-path@1.0.0: {} @@ -27463,7 +27425,7 @@ snapshots: forever-agent@0.6.1: {} - fork-ts-checker-webpack-plugin@6.5.3(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + fork-ts-checker-webpack-plugin@6.5.3(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: '@babel/code-frame': 7.24.7 '@types/json-schema': 7.0.12 @@ -27479,7 +27441,7 @@ snapshots: semver: 7.6.3 tapable: 1.1.3 typescript: 5.6.2 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) optionalDependencies: eslint: 9.10.0(jiti@1.21.6) @@ -28047,14 +28009,14 @@ snapshots: relateurl: 0.2.7 terser: 5.18.2 - html-webpack-plugin@5.5.3(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + html-webpack-plugin@5.5.3(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: '@types/html-minifier-terser': 6.1.0 html-minifier-terser: 6.1.0 lodash: 4.17.21 pretty-error: 4.0.0 tapable: 2.2.1 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) htmlescape@1.1.1: {} @@ -28802,16 +28764,16 @@ snapshots: transitivePeerDependencies: - supports-color - jest-cli@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10): + jest-cli@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10): dependencies: - '@jest/core': 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + '@jest/core': 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 
import-local: 3.1.0 - jest-config: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + jest-config: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) jest-util: 27.5.1 jest-validate: 27.5.1 prompts: 2.4.2 @@ -28823,7 +28785,7 @@ snapshots: - ts-node - utf-8-validate - jest-config@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10): + jest-config@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10): dependencies: '@babel/core': 7.25.2 '@jest/test-sequencer': 27.5.1 @@ -28850,7 +28812,7 @@ snapshots: slash: 3.0.0 strip-json-comments: 3.1.1 optionalDependencies: - ts-node: 10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2) + ts-node: 10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2) transitivePeerDependencies: - bufferutil - canvas @@ -29210,11 +29172,11 @@ snapshots: leven: 3.1.0 pretty-format: 29.7.0 - jest-watch-typeahead@1.1.0(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10)): + jest-watch-typeahead@1.1.0(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10)): dependencies: ansi-escapes: 4.3.2 chalk: 4.1.2 - jest: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + jest: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) jest-regex-util: 28.0.2 jest-watcher: 28.1.3 slash: 4.0.0 @@ -29267,11 +29229,11 @@ snapshots: merge-stream: 2.0.0 supports-color: 8.1.1 - jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10): + jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10): dependencies: - '@jest/core': 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + '@jest/core': 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) import-local: 3.1.0 - jest-cli: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + jest-cli: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) transitivePeerDependencies: - bufferutil - canvas @@ -30322,10 +30284,10 @@ snapshots: mimic-response@1.0.1: {} - mini-css-extract-plugin@2.7.6(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + mini-css-extract-plugin@2.7.6(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: schema-utils: 4.2.0 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) minify-stream@2.1.0: dependencies: @@ -31570,21 +31532,21 @@ snapshots: postcss: 8.4.45 postcss-value-parser: 4.2.0 - postcss-load-config@3.1.4(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)): + 
postcss-load-config@3.1.4(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)): dependencies: lilconfig: 2.1.0 yaml: 1.10.2 optionalDependencies: postcss: 8.4.45 - ts-node: 10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2) + ts-node: 10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2) - postcss-load-config@4.0.1(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)): + postcss-load-config@4.0.1(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)): dependencies: lilconfig: 2.1.0 yaml: 2.5.0 optionalDependencies: postcss: 8.4.45 - ts-node: 10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2) + ts-node: 10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2) postcss-load-config@6.0.1(jiti@1.21.6)(postcss@8.4.45)(tsx@4.19.0)(yaml@2.5.1): dependencies: @@ -31595,13 +31557,13 @@ snapshots: tsx: 4.19.0 yaml: 2.5.1 - postcss-loader@6.2.1(postcss@8.4.45)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + postcss-loader@6.2.1(postcss@8.4.45)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: cosmiconfig: 7.1.0 klona: 2.0.6 postcss: 8.4.45 semver: 7.3.8 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) postcss-logical@5.0.4(postcss@8.4.45): dependencies: @@ -32173,7 +32135,7 @@ snapshots: regenerator-runtime: 0.13.11 whatwg-fetch: 3.6.2 - react-dev-utils@12.0.1(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + react-dev-utils@12.0.1(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: '@babel/code-frame': 7.22.5 address: 1.2.2 @@ -32184,7 +32146,7 @@ snapshots: escape-string-regexp: 4.0.0 filesize: 8.0.7 find-up: 5.0.0 - fork-ts-checker-webpack-plugin: 6.5.3(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + fork-ts-checker-webpack-plugin: 6.5.3(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) global-modules: 2.0.0 globby: 11.1.0 gzip-size: 6.0.0 @@ -32199,7 +32161,7 @@ snapshots: shell-quote: 1.8.1 strip-ansi: 6.0.1 text-table: 0.2.0 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) optionalDependencies: typescript: 5.6.2 transitivePeerDependencies: @@ -32317,56 +32279,56 @@ snapshots: optionalDependencies: '@types/react': 18.3.1 - react-scripts@5.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(@swc/core@1.7.14)(@types/babel__core@7.20.5)(bufferutil@4.0.8)(esbuild@0.17.19)(eslint@9.10.0(jiti@1.21.6))(react@18.3.1)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(type-fest@3.1.0)(typescript@5.6.2)(utf-8-validate@5.0.10): + react-scripts@5.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/babel__core@7.20.5)(bufferutil@4.0.8)(esbuild@0.17.19)(eslint@9.10.0(jiti@1.21.6))(react@18.3.1)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(type-fest@3.1.0)(typescript@5.6.2)(utf-8-validate@5.0.10): 
dependencies: '@babel/core': 7.22.5 - '@pmmmwh/react-refresh-webpack-plugin': 0.5.10(react-refresh@0.11.0)(type-fest@3.1.0)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)))(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + '@pmmmwh/react-refresh-webpack-plugin': 0.5.10(react-refresh@0.11.0)(type-fest@3.1.0)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)))(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) '@svgr/webpack': 5.5.0 babel-jest: 27.5.1(@babel/core@7.22.5) - babel-loader: 8.3.0(@babel/core@7.22.5)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + babel-loader: 8.3.0(@babel/core@7.22.5)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) babel-plugin-named-asset-import: 0.3.8(@babel/core@7.22.5) babel-preset-react-app: 10.0.1 bfj: 7.0.2 browserslist: 4.21.9 camelcase: 6.3.0 case-sensitive-paths-webpack-plugin: 2.4.0 - css-loader: 6.8.1(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) - css-minimizer-webpack-plugin: 3.4.1(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + css-loader: 6.8.1(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) + css-minimizer-webpack-plugin: 3.4.1(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) dotenv: 10.0.0 dotenv-expand: 5.1.0 eslint: 9.10.0(jiti@1.21.6) - eslint-config-react-app: 7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2) - eslint-webpack-plugin: 3.2.0(eslint@9.10.0(jiti@1.21.6))(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) - file-loader: 6.2.0(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + eslint-config-react-app: 7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.22.5))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.22.5))(eslint@9.10.0(jiti@1.21.6))(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10))(typescript@5.6.2) + eslint-webpack-plugin: 3.2.0(eslint@9.10.0(jiti@1.21.6))(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) + file-loader: 6.2.0(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) fs-extra: 10.1.0 - html-webpack-plugin: 5.5.3(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + html-webpack-plugin: 5.5.3(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) identity-obj-proxy: 3.0.0 - jest: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) + jest: 27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10) jest-resolve: 27.5.1 - jest-watch-typeahead: 1.1.0(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10)) - mini-css-extract-plugin: 2.7.6(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + jest-watch-typeahead: 1.1.0(jest@27.5.1(bufferutil@4.0.8)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(utf-8-validate@5.0.10)) + mini-css-extract-plugin: 
2.7.6(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) postcss: 8.4.45 postcss-flexbugs-fixes: 5.0.2(postcss@8.4.45) - postcss-loader: 6.2.1(postcss@8.4.45)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + postcss-loader: 6.2.1(postcss@8.4.45)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) postcss-normalize: 10.0.1(browserslist@4.21.9)(postcss@8.4.45) postcss-preset-env: 7.8.3(postcss@8.4.45) prompts: 2.4.2 react: 18.3.1 react-app-polyfill: 3.0.0 - react-dev-utils: 12.0.1(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + react-dev-utils: 12.0.1(eslint@9.10.0(jiti@1.21.6))(typescript@5.6.2)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) react-refresh: 0.11.0 resolve: 1.22.2 resolve-url-loader: 4.0.0 - sass-loader: 12.6.0(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + sass-loader: 12.6.0(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) semver: 7.3.8 - source-map-loader: 3.0.2(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) - style-loader: 3.3.3(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) - tailwindcss: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)) - terser-webpack-plugin: 5.3.9(@swc/core@1.7.14)(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) - webpack-dev-server: 4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) - webpack-manifest-plugin: 4.1.1(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) - workbox-webpack-plugin: 6.6.0(@types/babel__core@7.20.5)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + source-map-loader: 3.0.2(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) + style-loader: 3.3.3(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) + tailwindcss: 3.4.10(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)) + terser-webpack-plugin: 5.3.9(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) + webpack-dev-server: 4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) + webpack-manifest-plugin: 4.1.1(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) + workbox-webpack-plugin: 6.6.0(@types/babel__core@7.20.5)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) optionalDependencies: fsevents: 2.3.3 typescript: 5.6.2 @@ -32888,11 +32850,11 @@ snapshots: sanitize.css@13.0.0: {} - sass-loader@12.6.0(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + sass-loader@12.6.0(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: klona: 2.0.6 neo-async: 2.6.2 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) sax@1.2.4: {} @@ -33248,12 +33210,12 @@ snapshots: source-map-js@1.2.1: {} - source-map-loader@3.0.2(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + source-map-loader@3.0.2(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: abab: 2.0.6 iconv-lite: 0.6.3 source-map-js: 1.2.1 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 
5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) source-map-support@0.5.21: dependencies: @@ -33647,9 +33609,9 @@ snapshots: dependencies: boundary: 2.0.0 - style-loader@3.3.3(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + style-loader@3.3.3(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) styled-jsx@5.1.1(@babel/core@7.25.2)(react@18.3.1): dependencies: @@ -33802,7 +33764,7 @@ snapshots: tachyons@4.12.0: {} - tailwindcss@3.4.10(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)): + tailwindcss@3.4.10(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)): dependencies: '@alloc/quick-lru': 5.2.0 arg: 5.0.2 @@ -33821,7 +33783,7 @@ snapshots: postcss: 8.4.45 postcss-import: 15.1.0(postcss@8.4.45) postcss-js: 4.0.1(postcss@8.4.45) - postcss-load-config: 4.0.1(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)) + postcss-load-config: 4.0.1(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)) postcss-nested: 6.0.1(postcss@8.4.45) postcss-selector-parser: 6.0.13 resolve: 1.22.8 @@ -33867,14 +33829,14 @@ snapshots: ansi-escapes: 4.3.2 supports-hyperlinks: 2.3.0 - terser-webpack-plugin@5.3.9(@swc/core@1.7.14)(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + terser-webpack-plugin@5.3.9(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: '@jridgewell/trace-mapping': 0.3.18 jest-worker: 27.5.1 schema-utils: 3.3.0 serialize-javascript: 6.0.1 terser: 5.18.2 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) optionalDependencies: '@swc/core': 1.7.14(@swc/helpers@0.5.12) esbuild: 0.17.19 @@ -34150,7 +34112,7 @@ snapshots: ts-log@2.2.5: {} - ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2): + ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 @@ -34192,7 +34154,7 @@ snapshots: tslib@2.7.0: {} - tsup@6.7.0(@swc/core@1.7.14)(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2))(typescript@5.6.2): + tsup@6.7.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2))(typescript@5.6.2): dependencies: bundle-require: 4.2.1(esbuild@0.17.19) cac: 6.7.14 @@ -34202,7 +34164,7 @@ snapshots: execa: 5.1.1 globby: 11.1.0 joycon: 3.1.1 - postcss-load-config: 3.1.4(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14)(@types/node@22.5.4)(typescript@5.6.2)) + postcss-load-config: 3.1.4(postcss@8.4.45)(ts-node@10.9.1(@swc/core@1.7.14(@swc/helpers@0.5.12))(@types/node@22.5.4)(typescript@5.6.2)) resolve-from: 5.0.0 rollup: 3.29.4 source-map: 0.8.0-beta.0 @@ -34887,7 +34849,7 @@ snapshots: why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 22.5.4 - '@vitest/browser': 2.0.5(bufferutil@4.0.8)(playwright@1.47.0)(typescript@5.6.2)(utf-8-validate@5.0.10)(vitest@2.0.5)(webdriverio@9.0.9(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + '@vitest/browser': 
2.0.5(bufferutil@4.0.8)(playwright@1.47.0)(typescript@5.6.2)(utf-8-validate@5.0.10)(vitest@2.0.5)(webdriverio@9.0.9(bufferutil@4.0.8)(utf-8-validate@5.0.10)) jsdom: 16.7.0(bufferutil@4.0.8)(utf-8-validate@5.0.10) transitivePeerDependencies: - less @@ -35002,6 +34964,23 @@ snapshots: pvtsutils: 1.3.2 tslib: 2.7.0 + webdriver@9.0.8(bufferutil@4.0.8)(utf-8-validate@5.0.10): + dependencies: + '@types/node': 20.16.5 + '@types/ws': 8.5.12 + '@wdio/config': 9.0.8 + '@wdio/logger': 9.0.8 + '@wdio/protocols': 9.0.8 + '@wdio/types': 9.0.8 + '@wdio/utils': 9.0.8 + deepmerge-ts: 7.1.0 + ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@5.0.10) + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + optional: true + webdriver@9.0.8(bufferutil@4.0.8)(utf-8-validate@6.0.4): dependencies: '@types/node': 20.16.5 @@ -35018,6 +34997,41 @@ snapshots: - supports-color - utf-8-validate + webdriverio@9.0.9(bufferutil@4.0.8)(utf-8-validate@5.0.10): + dependencies: + '@types/node': 20.16.5 + '@types/sinonjs__fake-timers': 8.1.5 + '@wdio/config': 9.0.8 + '@wdio/logger': 9.0.8 + '@wdio/protocols': 9.0.8 + '@wdio/repl': 9.0.8 + '@wdio/types': 9.0.8 + '@wdio/utils': 9.0.8 + archiver: 7.0.1 + aria-query: 5.3.0 + cheerio: 1.0.0 + css-shorthand-properties: 1.1.1 + css-value: 0.0.1 + grapheme-splitter: 1.0.4 + htmlfy: 0.2.1 + import-meta-resolve: 4.1.0 + is-plain-obj: 4.1.0 + jszip: 3.10.1 + lodash.clonedeep: 4.5.0 + lodash.zip: 4.2.0 + minimatch: 9.0.5 + query-selector-shadow-dom: 1.0.1 + resq: 1.11.0 + rgb2hex: 0.2.5 + serialize-error: 11.0.3 + urlpattern-polyfill: 10.0.0 + webdriver: 9.0.8(bufferutil@4.0.8)(utf-8-validate@5.0.10) + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + optional: true + webdriverio@9.0.9(bufferutil@4.0.8)(utf-8-validate@6.0.4): dependencies: '@types/node': 20.16.5 @@ -35064,16 +35078,16 @@ snapshots: webidl-conversions@6.1.0: {} - webpack-dev-middleware@5.3.3(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + webpack-dev-middleware@5.3.3(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: colorette: 2.0.20 memfs: 3.5.3 mime-types: 2.1.35 range-parser: 1.2.1 schema-utils: 4.2.0 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) - webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@5.0.10)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: '@types/bonjour': 3.5.10 '@types/connect-history-api-fallback': 1.5.0 @@ -35103,20 +35117,20 @@ snapshots: serve-index: 1.9.1 sockjs: 0.3.24 spdy: 4.0.2 - webpack-dev-middleware: 5.3.3(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + webpack-dev-middleware: 5.3.3(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@5.0.10) optionalDependencies: - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) transitivePeerDependencies: - bufferutil - debug - supports-color - utf-8-validate - webpack-manifest-plugin@4.1.1(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + webpack-manifest-plugin@4.1.1(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: tapable: 2.2.1 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 
5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) webpack-sources: 2.3.1 webpack-sources@1.4.3: @@ -35133,7 +35147,7 @@ snapshots: webpack-virtual-modules@0.6.2: {} - webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19): + webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19): dependencies: '@types/eslint-scope': 3.7.4 '@types/estree': 1.0.1 @@ -35156,7 +35170,7 @@ snapshots: neo-async: 2.6.2 schema-utils: 3.3.0 tapable: 2.2.1 - terser-webpack-plugin: 5.3.9(@swc/core@1.7.14)(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)) + terser-webpack-plugin: 5.3.9(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)) watchpack: 2.4.0 webpack-sources: 3.2.3 transitivePeerDependencies: @@ -35387,12 +35401,12 @@ snapshots: workbox-sw@6.6.0: {} - workbox-webpack-plugin@6.6.0(@types/babel__core@7.20.5)(webpack@5.88.0(@swc/core@1.7.14)(esbuild@0.17.19)): + workbox-webpack-plugin@6.6.0(@types/babel__core@7.20.5)(webpack@5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19)): dependencies: fast-json-stable-stringify: 2.1.0 pretty-bytes: 5.6.0 upath: 1.2.0 - webpack: 5.88.0(@swc/core@1.7.14)(esbuild@0.17.19) + webpack: 5.88.0(@swc/core@1.7.14(@swc/helpers@0.5.12))(esbuild@0.17.19) webpack-sources: 1.4.3 workbox-build: 6.6.0(@types/babel__core@7.20.5) transitivePeerDependencies: From 2e38db3236be3fc062bb6bcf8b5e3fe6f0871ea9 Mon Sep 17 00:00:00 2001 From: chad Date: Fri, 13 Sep 2024 16:16:44 -0500 Subject: [PATCH 03/21] docs: add changeset --- .changeset/ninety-carpets-watch.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/ninety-carpets-watch.md diff --git a/.changeset/ninety-carpets-watch.md b/.changeset/ninety-carpets-watch.md new file mode 100644 index 0000000000..618d280a15 --- /dev/null +++ b/.changeset/ninety-carpets-watch.md @@ -0,0 +1,5 @@ +--- +"@internal/benchmarks": minor +--- + +chore: run benchmarking utility in devnet environment From 2592c90857c0570440e4eaa1264032f69172bf2d Mon Sep 17 00:00:00 2001 From: chad Date: Fri, 13 Sep 2024 17:59:01 -0500 Subject: [PATCH 04/21] chore: use pyth large contract + update test config --- .changeset/ninety-carpets-watch.md | 2 +- .../{bench-testnet.yaml => bench-devnet.yaml} | 0 .github/workflows/bench.yml | 47 +- .../src/contract-interaction.bench.ts | 66 +- .../benchmarks/src/cost-estimation.bench.ts | 11 +- .../src/transaction-results.bench.ts | 56 +- .../test/fixtures/forc-projects/Forc.toml | 7 +- .../forc-projects/pyth-contract/Forc.toml | 10 + .../forc-projects/pyth-contract/src/main.sw | 934 ++++++++++++++++++ .../forc-projects/pyth-interface/Forc.toml | 8 + .../pyth-interface/src/data_structures.sw | 11 + .../src/data_structures/accumulator_update.sw | 136 +++ .../batch_attestation_update.sw | 94 ++ .../src/data_structures/data_source.sw | 39 + .../data_structures/governance_instruction.sw | 242 +++++ .../src/data_structures/governance_payload.sw | 29 + .../src/data_structures/price.sw | 343 +++++++ .../src/data_structures/update_type.sw | 38 + .../src/data_structures/wormhole_light.sw | 589 +++++++++++ .../pyth-interface/src/errors.sw | 72 ++ .../pyth-interface/src/events.sw | 43 + .../pyth-interface/src/interface.sw | 323 ++++++ .../pyth-interface/src/pyth_merkle_proof.sw | 63 ++ .../forc-projects/pyth-interface/src/utils.sw | 17 + 24 files changed, 3108 insertions(+), 72 deletions(-) rename .github/workflows/{bench-testnet.yaml => bench-devnet.yaml} (100%) create mode 100644 
internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw create mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw diff --git a/.changeset/ninety-carpets-watch.md b/.changeset/ninety-carpets-watch.md index 618d280a15..9b0c215837 100644 --- a/.changeset/ninety-carpets-watch.md +++ b/.changeset/ninety-carpets-watch.md @@ -1,5 +1,5 @@ --- -"@internal/benchmarks": minor +"@internal/benchmarks": patch --- chore: run benchmarking utility in devnet environment diff --git a/.github/workflows/bench-testnet.yaml b/.github/workflows/bench-devnet.yaml similarity index 100% rename from .github/workflows/bench-testnet.yaml rename to .github/workflows/bench-devnet.yaml diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 96f6a93255..075d7789e5 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -1,27 +1,28 @@ -name: Benchmarks -on: - pull_request: - branches: - - master - push: - branches-ignore: - - master +# Uncomment this when we want to run benchmarks on PRs +# name: Benchmarks +# on: +# pull_request: +# branches: +# - master +# push: +# branches-ignore: +# - master -jobs: - benchmarks: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 +# jobs: +# benchmarks: +# runs-on: ubuntu-latest +# steps: +# - name: Checkout +# uses: actions/checkout@v4 - - name: CI Setup - uses: ./.github/actions/test-setup +# - name: CI Setup +# uses: ./.github/actions/test-setup - - name: Pretest - run: pnpm pretest +# - name: Pretest +# run: pnpm pretest - - name: Run Node benchmarks - uses: CodSpeedHQ/action@v3 - with: - run: pnpm bench:node - token: ${{ secrets.CODSPEED_TOKEN }} +# - name: Run Node benchmarks +# uses: CodSpeedHQ/action@v3 +# with: +# run: pnpm bench:node +# token: ${{ secrets.CODSPEED_TOKEN }} diff --git 
a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index 774b77b9d1..e0c00f6801 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -5,7 +5,11 @@ import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; import type { CounterContract, CallTestContract } from '../test/typegen/contracts'; -import { CounterContractFactory, CallTestContractFactory } from '../test/typegen/contracts'; +import { + CounterContractFactory, + CallTestContractFactory, + PythContractFactory, +} from '../test/typegen/contracts'; import { DEVNET_CONFIG } from './config'; /** @@ -44,38 +48,60 @@ describe('Contract Interaction Benchmarks', () => { }); } - bench('should successfully execute a contract read function', async () => { - const tx = await contract.functions.get_count().call(); + bench('should successfully execute a contract read function 10 times', async () => { + for (let i = 0; i < 10; i++) { + const tx = await contract.functions.get_count().call(); - const { value } = await tx.waitForResult(); + const { value } = await tx.waitForResult(); - expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); + expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); + } }); - bench('should successfully execute a contract multi call', async () => { - const tx = await contract - .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) - .call(); + bench('should successfully execute a contract multi call 10 times', async () => { + const initialValue = 100; + for (let i = 1; i < 11; i++) { + const tx = await contract + .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) + .call(); - const { value } = await tx.waitForResult(); + const { value } = await tx.waitForResult(); - expect(JSON.stringify(value)).toEqual(JSON.stringify([bn(100), bn(100)])); + expect(JSON.stringify(value)).toEqual( + JSON.stringify([bn(initialValue * i), bn(initialValue * i)]) + ); + } }); - bench('should successfully write to a contract', async () => { - const tx = await contract.functions.increment_counter(100).call(); - await tx.waitForResult(); + bench('should successfully write to a contract 10 times', async () => { + for (let i = 0; i < 10; i++) { + const tx = await contract.functions.increment_counter(100).call(); + await tx.waitForResult(); + } }); - bench('should successfully execute a contract mint', async () => { - const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); + bench('should successfully execute a contract mint 10 times', async () => { + for (let i = 0; i < 10; i++) { + const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); + await tx.waitForResult(); + } + }); + + bench('should successfully execute a contract deploy 10 times', async () => { + for (let i = 0; i < 10; i++) { + const factory = new CounterContractFactory(wallet); + const { waitForResult } = await factory.deploy(); + const { contract: deployedContract } = await waitForResult(); - await tx.waitForResult(); + expect(deployedContract).toBeDefined(); + } }); - bench('should successfully execute a contract deploy', async () => { - const factory = new CounterContractFactory(wallet); - const { waitForResult } = await factory.deploy(); + bench('should successfully execute a contract deploy as blobs', async () => { + const factory = new 
PythContractFactory(wallet); + const { waitForResult } = await factory.deployAsBlobTx({ + chunkSizeMultiplier: 0.9, + }); const { contract: deployedContract } = await waitForResult(); expect(deployedContract).toBeDefined(); diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index b34b63db5e..0796b7249d 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -1,6 +1,6 @@ /* eslint-disable import/no-extraneous-dependencies */ -import type { TransferParams, WalletUnlocked } from 'fuels'; +import type { TransferParams, WalletUnlocked, BytesLike } from 'fuels'; import { Wallet, Provider, ScriptTransactionRequest } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; @@ -51,8 +51,17 @@ describe('Cost Estimation Benchmarks', () => { beforeAll(async () => { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); + const wallet = Wallet.fromPrivateKey( + process.env.DEVNET_WALLET_PVT_KEY as BytesLike, + provider + ); setup(provider); + + const contractFactory = new CallTestContractFactory(wallet); + const { waitForResult } = await contractFactory.deploy(); + const { contract: deployedContract } = await waitForResult(); + contract = deployedContract; }); } else { beforeEach(async () => { diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index 3a5d22d358..477e25436b 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -84,37 +84,41 @@ describe('Transaction Submission Benchmarks', () => { } }); - bench('should successfully perform a batch transfer', async () => { - const amountToTransfer1 = 989; - const amountToTransfer2 = 699; - const amountToTransfer3 = 122; - - const transferParams: TransferParams[] = [ - { - destination: receiver1.address, - amount: amountToTransfer1, - assetId: provider.getBaseAssetId(), - }, - { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, - { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, - ]; - - const tx = await wallet.batchTransfer(transferParams); + bench('should successfully perform a batch transfer 10 times', async () => { + for (let i = 0; i < 10; i++) { + const amountToTransfer1 = 989; + const amountToTransfer2 = 699; + const amountToTransfer3 = 122; - const { isStatusSuccess } = await tx.waitForResult(); + const transferParams: TransferParams[] = [ + { + destination: receiver1.address, + amount: amountToTransfer1, + assetId: provider.getBaseAssetId(), + }, + { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, + { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, + ]; - expect(isStatusSuccess).toBeTruthy(); + const tx = await wallet.batchTransfer(transferParams); + + const { isStatusSuccess } = await tx.waitForResult(); + + expect(isStatusSuccess).toBeTruthy(); + } }); - bench('should successfully withdraw to the base layer', async () => { - const txParams = { - witnessLimit: 800, - maxFee: 100_000, - }; + bench('should successfully withdraw to the base layer 10 times', async () => { + for (let i = 0; i < 10; i++) { + const txParams = { + witnessLimit: 800, + maxFee: 100_000, + }; - const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 
500, txParams); - const { transaction } = await pendingTx.waitForResult(); + const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); + const { transaction } = await pendingTx.waitForResult(); - expect(transaction).toBeDefined(); + expect(transaction).toBeDefined(); + } }); }); diff --git a/internal/benchmarks/test/fixtures/forc-projects/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/Forc.toml index 2db5c4ff42..33f7786179 100644 --- a/internal/benchmarks/test/fixtures/forc-projects/Forc.toml +++ b/internal/benchmarks/test/fixtures/forc-projects/Forc.toml @@ -1,2 +1,7 @@ [workspace] -members = ["call-test-contract", "counter-contract"] +members = [ + "call-test-contract", + "counter-contract", + "pyth-contract", + "pyth-interface", +] diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml new file mode 100644 index 0000000000..01cffae12f --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml @@ -0,0 +1,10 @@ +[project] +authors = ["Fuel Labs "] +entry = "main.sw" +license = "Apache-2.0" +name = "pyth-contract" + +[dependencies] +pyth_interface = { path = "../pyth-interface" } +standards = { git = "https://github.com/FuelLabs/sway-standards", tag = "v0.4.4" } +sway_libs = { git = "https://github.com/FuelLabs/sway-libs", tag = "v0.21.0" } diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw new file mode 100644 index 0000000000..8df0945616 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw @@ -0,0 +1,934 @@ +contract; + +use std::{ + asset_id::AssetId, + block::timestamp, + bytes::Bytes, + call_frames::msg_asset_id, + constants::{ + ZERO_B256, + }, + context::msg_amount, + hash::{ + Hash, + keccak256, + sha256, + }, + revert::revert, + storage::{ + storage_map::StorageMap, + storage_vec::*, + }, +}; + +use pyth_interface::{ + data_structures::{ + batch_attestation_update::*, + data_source::*, + governance_instruction::*, + governance_payload::*, + price::*, + update_type::UpdateType, + wormhole_light::*, + }, + errors::{ + PythError, + WormholeError, + }, + events::{ + ConstructedEvent, + ContractUpgradedEvent, + DataSourcesSetEvent, + FeeSetEvent, + GovernanceDataSourceSetEvent, + NewGuardianSetEvent, + UpdatedPriceFeedsEvent, + ValidPeriodSetEvent, + }, + pyth_merkle_proof::validate_proof, + PythCore, + PythInfo, + PythInit, + utils::total_fee, + WormholeGuardians, +}; + +use sway_libs::ownership::*; +use standards::src5::{SRC5, State}; + +const GUARDIAN_SET_EXPIRATION_TIME_SECONDS: u64 = 86400; // 24 hours in seconds +configurable { + DEPLOYER: Identity = Identity::Address(Address::from(ZERO_B256)), +} + +storage { + // | | + // --+-- PYTH STATE --+-- + // | | + // (chainId, emitterAddress) => isValid; takes advantage of + // constant-time mapping lookup for VM verification + is_valid_data_source: StorageMap = StorageMap {}, + // Mapping of cached price information + // priceId => PriceInfo + latest_price_feed: StorageMap = StorageMap {}, + // Fee required for each update + single_update_fee: u64 = 0, + // For tracking all active emitter/chain ID pairs + valid_data_sources: StorageVec = StorageVec {}, + /// Maximum acceptable time period before price is considered to be stale. 
+ /// This includes attestation delay, block time, and potential clock drift + /// between the source/target chains. + valid_time_period_seconds: u64 = 0, + /// Governance data source. VAA messages from this source can change this contract + /// state. e.g., upgrade the contract, change the valid data sources, and more. + governance_data_source: DataSource = DataSource { + chain_id: 0u16, + emitter_address: ZERO_B256, + }, + /// Index of the governance data source, increased each time the governance data source changes. + governance_data_source_index: u32 = 0, + /// Sequence number of the last executed governance message. Any governance message + /// with a lower or equal sequence number will be discarded. This prevents double-execution, + /// and also makes sure that messages are executed in the right order. + last_executed_governance_sequence: u64 = 0, + /// Chain ID of the contract + chain_id: u16 = 0, + /// | | + /// --+-- WORMHOLE STATE --+-- + /// | | + /// Mapping of consumed governance actions + wormhole_consumed_governance_actions: StorageMap = StorageMap {}, + /// Mapping of guardian_set_index => guardian set + wormhole_guardian_sets: StorageMap = StorageMap {}, + /// Current active guardian set index + wormhole_guardian_set_index: u32 = 0, + /// Using Ethereum's Wormhole governance + wormhole_governance_data_source: DataSource = DataSource { + chain_id: 0u16, + emitter_address: ZERO_B256, + }, + /// | | + /// --+-- GOVERNANCE STATE --+-- + /// | | + current_implementation: Identity = Identity::Address(Address::from(ZERO_B256)), +} + +impl SRC5 for Contract { + #[storage(read)] + fn owner() -> State { + _owner() + } +} + +impl PythCore for Contract { + #[storage(read)] + fn ema_price(price_feed_id: PriceFeedId) -> Price { + ema_price_no_older_than(valid_time_period(), price_feed_id) + } + + #[storage(read)] + fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { + ema_price_no_older_than(time_period, price_feed_id) + } + + #[storage(read)] + fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price { + ema_price_unsafe(price_feed_id) + } + + #[storage(read), payable] + fn parse_price_feed_updates( + max_publish_time: u64, + min_publish_time: u64, + target_price_feed_ids: Vec, + update_data: Vec, + ) -> Vec { + require( + msg_asset_id() == AssetId::base(), + PythError::FeesCanOnlyBePaidInTheBaseAsset, + ); + + let required_fee = update_fee(update_data); + require(msg_amount() >= required_fee, PythError::InsufficientFee); + + let mut output_price_feeds: Vec = Vec::with_capacity(target_price_feed_ids.len()); + let mut i = 0; + while i < update_data.len() { + let data = update_data.get(i).unwrap(); + + match UpdateType::determine_type(data) { + UpdateType::Accumulator(accumulator_update) => { + let (mut offset, digest, number_of_updates, encoded) = accumulator_update.verify_and_parse( + current_guardian_set_index(), + storage + .wormhole_guardian_sets, + storage + .is_valid_data_source, + ); + let mut i_2 = 0; + while i_2 < number_of_updates { + let (new_offset, price_feed) = PriceFeed::extract_from_merkle_proof(digest, encoded, offset); + + offset = new_offset; + + if price_feed.id.is_target(target_price_feed_ids) == false { + i_2 += 1; + continue; + } + + if price_feed.price.publish_time >= min_publish_time && price_feed.price.publish_time <= max_publish_time { + // check if output_price_feeds already contains a PriceFeed with price_feed.id, if so continue as we only want 1 + // output PriceFeed per target ID + if 
price_feed.id.is_contained_within(output_price_feeds) { + i_2 += 1; + continue; + } + + output_price_feeds.push(price_feed) + } + + i_2 += 1; + } + require(offset == encoded.len(), PythError::InvalidUpdateDataLength); + }, + UpdateType::BatchAttestation(batch_attestation_update) => { + let vm = WormholeVM::parse_and_verify_pyth_vm( + current_guardian_set_index(), + batch_attestation_update + .data, + storage + .wormhole_guardian_sets, + storage + .is_valid_data_source, + ); + + let (mut attestation_index, number_of_attestations, attestation_size) = parse_and_verify_batch_attestation_header(vm.payload); + let attestation_size_u16 = attestation_size.as_u64(); + + let mut i_2: u16 = 0; + while i_2 < number_of_attestations { + let (_, slice) = vm.payload.split_at(attestation_index + 32); + let (price_feed_id, _) = slice.split_at(32); + let price_feed_id: PriceFeedId = price_feed_id.into(); + + if price_feed_id.is_target(target_price_feed_ids) == false { + attestation_index += attestation_size_u16; + i_2 += 1; + continue; + } + + let price_feed = PriceFeed::parse_attestation(attestation_size, vm.payload, attestation_index); + + if price_feed.price.publish_time >= min_publish_time && price_feed.price.publish_time <= max_publish_time { + // check if output_price_feeds already contains a PriceFeed with price_feed.id, if so continue; + // as we only want 1 output PriceFeed per target ID + if price_feed.id.is_contained_within(output_price_feeds) { + attestation_index += attestation_size_u16; + i_2 += 1; + continue; + } + + output_price_feeds.push(price_feed) + } + + attestation_index += attestation_size_u16; + i_2 += 1; + } + } + } + + i += 1; + } + + require( + target_price_feed_ids + .len() == output_price_feeds + .len(), + PythError::PriceFeedNotFoundWithinRange, + ); + + output_price_feeds + } + + #[storage(read)] + fn price(price_feed_id: PriceFeedId) -> Price { + price_no_older_than(valid_time_period(), price_feed_id) + } + + #[storage(read)] + fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { + price_no_older_than(time_period, price_feed_id) + } + + #[storage(read)] + fn price_unsafe(price_feed_id: PriceFeedId) -> Price { + price_unsafe(price_feed_id) + } + + #[storage(read)] + fn update_fee(update_data: Vec) -> u64 { + update_fee(update_data) + } + + #[storage(read, write), payable] + fn update_price_feeds(update_data: Vec) { + update_price_feeds(update_data) + } + + #[storage(read, write), payable] + fn update_price_feeds_if_necessary( + price_feed_ids: Vec, + publish_times: Vec, + update_data: Vec, + ) { + require( + price_feed_ids + .len() == publish_times + .len(), + PythError::LengthOfPriceFeedIdsAndPublishTimesMustMatch, + ); + + let mut i = 0; + while i < price_feed_ids.len() { + if latest_publish_time(price_feed_ids.get(i).unwrap()) < publish_times.get(i).unwrap() + { + update_price_feeds(update_data); + return; + } + + i += 1; + } + } + + #[storage(read)] + fn valid_time_period() -> u64 { + valid_time_period() + } +} + +/// PythCore Private Functions /// +#[storage(read)] +fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { + let price = ema_price_unsafe(price_feed_id); + let current_time = timestamp(); + require( + current_time - price.publish_time <= time_period, + PythError::OutdatedPrice, + ); + + price +} + +#[storage(read)] +fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price { + let price_feed = storage.latest_price_feed.get(price_feed_id).try_read(); + require(price_feed.is_some(), 
PythError::PriceFeedNotFound); + + price_feed.unwrap().ema_price +} + +#[storage(read)] +fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { + let price = price_unsafe(price_feed_id); + let current_time = timestamp(); + require( + current_time - price.publish_time <= time_period, + PythError::OutdatedPrice, + ); + + price +} + +#[storage(read)] +fn price_unsafe(price_feed_id: PriceFeedId) -> Price { + let price_feed = storage.latest_price_feed.get(price_feed_id).try_read(); + require(price_feed.is_some(), PythError::PriceFeedNotFound); + + price_feed.unwrap().price +} + +#[storage(read)] +fn update_fee(update_data: Vec) -> u64 { + let mut total_number_of_updates = 0; + let mut i = 0; + while i < update_data.len() { + let data = update_data.get(i).unwrap(); + + match UpdateType::determine_type(data) { + UpdateType::Accumulator(accumulator_update) => { + let proof_size_offset = accumulator_update.verify(); + + total_number_of_updates += accumulator_update.total_updates(proof_size_offset); + }, + UpdateType::BatchAttestation => { + total_number_of_updates += 1; + }, + } + + i += 1; + } + + total_fee(total_number_of_updates, storage.single_update_fee) +} + +#[storage(read, write), payable] +fn update_price_feeds(update_data: Vec) { + require( + msg_asset_id() == AssetId::base(), + PythError::FeesCanOnlyBePaidInTheBaseAsset, + ); + + let mut total_number_of_updates = 0; + + // let mut updated_price_feeds: Vec = Vec::new(); // TODO: requires append for Vec + let mut i = 0; + while i < update_data.len() { + let data = update_data.get(i).unwrap(); + + match UpdateType::determine_type(data) { + UpdateType::Accumulator(accumulator_update) => { + let (number_of_updates, _updated_ids) = accumulator_update.update_price_feeds( + current_guardian_set_index(), + storage + .wormhole_guardian_sets, + storage + .latest_price_feed, + storage + .is_valid_data_source, + ); + // updated_price_feeds.append(updated_ids); // TODO: requires append for Vec + total_number_of_updates += number_of_updates; + }, + UpdateType::BatchAttestation(batch_attestation_update) => { + let _updated_ids = batch_attestation_update.update_price_feeds( + current_guardian_set_index(), + storage + .wormhole_guardian_sets, + storage + .latest_price_feed, + storage + .is_valid_data_source, + ); + // updated_price_feeds.append(updated_ids); // TODO: requires append for Vec + total_number_of_updates += 1; + }, + } + + i += 1; + } + + let required_fee = total_fee(total_number_of_updates, storage.single_update_fee); + require(msg_amount() >= required_fee, PythError::InsufficientFee); + + // log(UpdatedPriceFeedsEvent { // TODO: requires append for Vec + // updated_price_feeds, + // }) +} + +#[storage(read)] +fn valid_time_period() -> u64 { + storage.valid_time_period_seconds.read() +} + +#[storage(read)] +fn governance_data_source() -> DataSource { + storage.governance_data_source.read() +} + +#[storage(write)] +fn set_governance_data_source(data_source: DataSource) { + storage.governance_data_source.write(data_source); +} + +#[storage(read)] +fn governance_data_source_index() -> u32 { + storage.governance_data_source_index.read() +} + +#[storage(write)] +fn set_governance_data_source_index(index: u32) { + storage.governance_data_source_index.write(index); +} + +#[storage(read)] +fn last_executed_governance_sequence() -> u64 { + storage.last_executed_governance_sequence.read() +} + +#[storage(write)] +fn set_last_executed_governance_sequence(sequence: u64) { + 
storage.last_executed_governance_sequence.write(sequence); +} + +#[storage(read)] +fn chain_id() -> u16 { + storage.chain_id.read() +} + +#[storage(read)] +fn current_implementation() -> Identity { + storage.current_implementation.read() +} + +impl PythInit for Contract { + #[storage(read, write)] + fn constructor( + data_sources: Vec, + governance_data_source: DataSource, + wormhole_governance_data_source: DataSource, + single_update_fee: u64, + valid_time_period_seconds: u64, + wormhole_guardian_set_addresses: Vec, + wormhole_guardian_set_index: u32, + chain_id: u16, + ) { + // This function sets the passed identity as the initial owner. https://github.com/FuelLabs/sway-libs/blob/8045a19e3297599750abdf6300c11e9927a29d40/libs/src/ownership.sw#L127-L138 + initialize_ownership(DEPLOYER); + // This function ensures that the sender is the owner. https://github.com/FuelLabs/sway-libs/blob/8045a19e3297599750abdf6300c11e9927a29d40/libs/src/ownership.sw#L59-L65 + only_owner(); + + require(data_sources.len() > 0, PythError::InvalidDataSourcesLength); + + let mut i = 0; + while i < data_sources.len() { + let data_source = data_sources.get(i).unwrap(); + storage.is_valid_data_source.insert(data_source, true); + storage.valid_data_sources.push(data_source); + + i += 1; + } + storage + .latest_price_feed + .write(StorageMap:: {}); + + storage + .valid_time_period_seconds + .write(valid_time_period_seconds); + storage.single_update_fee.write(single_update_fee); + + let guardian_length: u8 = wormhole_guardian_set_addresses.len().try_as_u8().unwrap(); + let mut new_guardian_set = StorageGuardianSet::new( + 0, + StorageKey::>::new( + sha256(("guardian_set_keys", wormhole_guardian_set_index)), + 0, + ZERO_B256, + ), + ); + let mut i: u8 = 0; + while i < guardian_length { + let key: b256 = wormhole_guardian_set_addresses.get(i.as_u64()).unwrap(); + new_guardian_set.keys.push(key); + i += 1; + } + + storage + .wormhole_guardian_set_index + .write(wormhole_guardian_set_index); + storage + .wormhole_guardian_sets + .insert(wormhole_guardian_set_index, new_guardian_set); + + storage.governance_data_source.write(governance_data_source); + storage + .wormhole_governance_data_source + .write(wormhole_governance_data_source); + storage.governance_data_source_index.write(0); + storage + .wormhole_consumed_governance_actions + .write(StorageMap:: {}); + storage.chain_id.write(chain_id); + storage.last_executed_governance_sequence.write(0); + + storage + .current_implementation + .write(Identity::Address(Address::from(ZERO_B256))); + + // This function revokes ownership of the current owner and disallows any new owners. 
https://github.com/FuelLabs/sway-libs/blob/8045a19e3297599750abdf6300c11e9927a29d40/libs/src/ownership.sw#L89-L99 + renounce_ownership(); + + log(ConstructedEvent { + guardian_set_index: wormhole_guardian_set_index, + }) + } +} + +impl PythInfo for Contract { + #[storage(read)] + fn valid_data_sources() -> Vec { + storage.valid_data_sources.load_vec() + } + + #[storage(read)] + fn latest_publish_time(price_feed_id: PriceFeedId) -> u64 { + latest_publish_time(price_feed_id) + } + + #[storage(read)] + fn price_feed_exists(price_feed_id: PriceFeedId) -> bool { + match storage.latest_price_feed.get(price_feed_id).try_read() { + Some(_) => true, + None => false, + } + } + + #[storage(read)] + fn price_feed_unsafe(price_feed_id: PriceFeedId) -> PriceFeed { + let price_feed = storage.latest_price_feed.get(price_feed_id).try_read(); + require(price_feed.is_some(), PythError::PriceFeedNotFound); + price_feed.unwrap() + } + + #[storage(read)] + fn single_update_fee() -> u64 { + storage.single_update_fee.read() + } + + #[storage(read)] + fn is_valid_data_source(data_source: DataSource) -> bool { + data_source.is_valid_data_source(storage.is_valid_data_source) + } + + #[storage(read)] + fn last_executed_governance_sequence() -> u64 { + last_executed_governance_sequence() + } + + #[storage(read)] + fn chain_id() -> u16 { + chain_id() + } +} + +/// PythInfo Private Functions /// +#[storage(read)] +fn latest_publish_time(price_feed_id: PriceFeedId) -> u64 { + match storage.latest_price_feed.get(price_feed_id).try_read() { + Some(price_feed) => price_feed.price.publish_time, + None => 0, + } +} + +impl WormholeGuardians for Contract { + #[storage(read)] + fn current_guardian_set_index() -> u32 { + current_guardian_set_index() + } + + #[storage(read)] + fn current_wormhole_provider() -> DataSource { + current_wormhole_provider() + } + + #[storage(read)] + fn guardian_set(index: u32) -> GuardianSet { + let stored_guardian_set = storage.wormhole_guardian_sets.get(index).try_read(); + require( + stored_guardian_set + .is_some(), + PythError::GuardianSetNotFound, + ); + GuardianSet::from_stored(stored_guardian_set.unwrap()) + } + + #[storage(read)] + fn governance_action_is_consumed(governance_action_hash: b256) -> bool { + governance_action_is_consumed(governance_action_hash) + } + + #[storage(read, write)] + fn submit_new_guardian_set(encoded_vm: Bytes) { + submit_new_guardian_set(encoded_vm) + } +} + +/// WormholeGuardians Private Functions /// +#[storage(read)] +fn current_guardian_set_index() -> u32 { + storage.wormhole_guardian_set_index.read() +} + +#[storage(read)] +fn current_wormhole_provider() -> DataSource { + storage.wormhole_governance_data_source.read() +} + +#[storage(read)] +fn governance_action_is_consumed(governance_action_hash: b256) -> bool { + match storage.wormhole_consumed_governance_actions.get(governance_action_hash).try_read() { + Some(bool_) => bool_, + None => false, + } +} + +#[storage(read, write)] +fn submit_new_guardian_set(encoded_vm: Bytes) { + let vm: WormholeVM = WormholeVM::parse_and_verify_wormhole_vm( + current_guardian_set_index(), + encoded_vm, + storage + .wormhole_guardian_sets, + ); + require( + vm.guardian_set_index == current_guardian_set_index(), + WormholeError::NotSignedByCurrentGuardianSet, + ); + let current_wormhole_provider: DataSource = current_wormhole_provider(); + require( + vm.emitter_chain_id == current_wormhole_provider + .chain_id, + WormholeError::InvalidGovernanceChain, + ); + require( + vm.emitter_address == current_wormhole_provider + 
.emitter_address, + WormholeError::InvalidGovernanceContract, + ); + require( + governance_action_is_consumed(vm.governance_action_hash) == false, + WormholeError::GovernanceActionAlreadyConsumed, + ); + + let current_guardian_set_index: u32 = current_guardian_set_index(); + let upgrade: GuardianSetUpgrade = GuardianSetUpgrade::parse_encoded_upgrade(current_guardian_set_index, vm.payload); + + storage + .wormhole_consumed_governance_actions + .insert(vm.governance_action_hash, true); + + // Set expiry if current GuardianSet exists + let current_guardian_set = storage.wormhole_guardian_sets.get(current_guardian_set_index).try_read(); + if current_guardian_set.is_some() { + let mut current_guardian_set = current_guardian_set.unwrap(); + current_guardian_set.expiration_time = timestamp() + GUARDIAN_SET_EXPIRATION_TIME_SECONDS; + storage + .wormhole_guardian_sets + .insert(current_guardian_set_index, current_guardian_set); + } + + storage + .wormhole_guardian_sets + .insert(upgrade.new_guardian_set_index, upgrade.new_guardian_set); + storage + .wormhole_guardian_set_index + .write(upgrade.new_guardian_set_index); + + log(NewGuardianSetEvent { + governance_action_hash: vm.governance_action_hash, + new_guardian_set_index: upgrade.new_guardian_set_index, + }) +} + +/// Transfer the governance data source to a new value with sanity checks to ensure the new governance data source can manage the contract. +#[storage(read, write)] +fn authorize_governance_data_source_transfer( + payload: AuthorizeGovernanceDataSourceTransferPayload, +) { + let old_governance_data_source = governance_data_source(); + + // Parse and verify the VAA contained in the payload to ensure it's valid and can manage the contract + let vm: WormholeVM = WormholeVM::parse_and_verify_wormhole_vm( + current_guardian_set_index(), + payload + .claim_vaa, + storage + .wormhole_guardian_sets, + ); + + let gi = GovernanceInstruction::parse_governance_instruction(vm.payload); + require( + gi.target_chain_id == chain_id() || gi.target_chain_id == 0, + PythError::InvalidGovernanceTarget, + ); + + require( + match gi.action { + GovernanceAction::RequestGovernanceDataSourceTransfer => true, + _ => false, + }, + PythError::InvalidGovernanceMessage, + ); + + let claim_payload = GovernanceInstruction::parse_request_governance_data_source_transfer_payload(gi.payload); + + require( + governance_data_source_index() < claim_payload + .governance_data_source_index, + PythError::OldGovernanceMessage, + ); + + set_governance_data_source_index(claim_payload.governance_data_source_index); + + let new_governance_data_source = DataSource { + chain_id: vm.emitter_chain_id, + emitter_address: vm.emitter_address, + }; + + set_governance_data_source(new_governance_data_source); + + // Setting the last executed governance to the claimVaa sequence to avoid using older sequences. 
+ set_last_executed_governance_sequence(vm.sequence); + + log(GovernanceDataSourceSetEvent { + old_data_source: old_governance_data_source, + new_data_source: new_governance_data_source, + initial_sequence: vm.sequence, + }); +} + +#[storage(read, write)] +fn set_data_sources(payload: SetDataSourcesPayload) { + let old_data_sources = storage.valid_data_sources.load_vec(); + + let mut i = 0; + while i < old_data_sources.len() { + let data_source = old_data_sources.get(i).unwrap(); + storage.is_valid_data_source.insert(data_source, false); + i += 1; + } + + // Clear the current list of valid data sources + storage.valid_data_sources.clear(); + + i = 0; + // Add new data sources from the payload and mark them as valid + while i < payload.data_sources.len() { + let data_source = payload.data_sources.get(i).unwrap(); + storage.valid_data_sources.push(data_source); + storage.is_valid_data_source.insert(data_source, true); + + i += 1; + } + + // Emit an event with the old and new data sources + log(DataSourcesSetEvent { + old_data_sources: old_data_sources, + new_data_sources: storage.valid_data_sources.load_vec(), + }); +} + +#[storage(read, write)] +fn set_fee(payload: SetFeePayload) { + let old_fee = storage.single_update_fee.read(); + storage.single_update_fee.write(payload.new_fee); + + log(FeeSetEvent { + old_fee, + new_fee: payload.new_fee, + }); +} + +#[storage(read, write)] +fn set_valid_period(payload: SetValidPeriodPayload) { + let old_valid_period = storage.valid_time_period_seconds.read(); + storage + .valid_time_period_seconds + .write(payload.new_valid_period); + + log(ValidPeriodSetEvent { + old_valid_period, + new_valid_period: payload.new_valid_period, + }); +} + +abi PythGovernance { + #[storage(read)] + fn governance_data_source() -> DataSource; + + #[storage(read, write)] + fn execute_governance_instruction(encoded_vm: Bytes); +} + +impl PythGovernance for Contract { + #[storage(read)] + fn governance_data_source() -> DataSource { + governance_data_source() + } + + #[storage(read, write)] + fn execute_governance_instruction(encoded_vm: Bytes) { + execute_governance_instruction(encoded_vm) + } +} + +#[storage(read, write)] +fn execute_governance_instruction(encoded_vm: Bytes) { + let vm = verify_governance_vm(encoded_vm); + // Log so that the WormholeVM struct will show up in the ABI and can be used in the tests + log(vm); + + let gi = GovernanceInstruction::parse_governance_instruction(vm.payload); + // Log so that the GovernanceInstruction struct will show up in the ABI and can be used in the tests + log(gi); + + require( + gi.target_chain_id == chain_id() || gi.target_chain_id == 0, + PythError::InvalidGovernanceTarget, + ); + + match gi.action { + GovernanceAction::UpgradeContract => { + require(gi.target_chain_id != 0, PythError::InvalidGovernanceTarget); + // TODO: implement upgrade_upgradeable_contract(uc) when Fuel releases the upgrade standard library; + log("Upgrade functionality not implemented"); + revert(0u64); + }, + GovernanceAction::AuthorizeGovernanceDataSourceTransfer => { + let agdst = GovernanceInstruction::parse_authorize_governance_data_source_transfer_payload(gi.payload); + log(agdst); + authorize_governance_data_source_transfer(agdst); + }, + GovernanceAction::SetDataSources => { + let sdsp = GovernanceInstruction::parse_set_data_sources_payload(gi.payload); + log(sdsp); + set_data_sources(sdsp); + }, + GovernanceAction::SetFee => { + let sf = GovernanceInstruction::parse_set_fee_payload(gi.payload); + log(sf); + set_fee(sf); + }, + 
GovernanceAction::SetValidPeriod => { + let svp = GovernanceInstruction::parse_set_valid_period_payload(gi.payload); + log(svp); + set_valid_period(svp); + }, + GovernanceAction::RequestGovernanceDataSourceTransfer => { + // RequestGovernanceDataSourceTransfer can be only part of AuthorizeGovernanceDataSourceTransfer message + // The `revert` function only accepts u64, so as + // a workaround we use require. + require(false, PythError::InvalidGovernanceMessage); + }, + _ => { + // The `revert` function only accepts u64, so as + // a workaround we use require. + require(false, PythError::InvalidGovernanceMessage); + } + } +} + +#[storage(read, write)] +fn verify_governance_vm(encoded_vm: Bytes) -> WormholeVM { + let vm: WormholeVM = WormholeVM::parse_and_verify_wormhole_vm( + current_guardian_set_index(), + encoded_vm, + storage + .wormhole_guardian_sets, + ); + + require( + storage + .governance_data_source + .read() + .is_valid_governance_data_source(vm.emitter_chain_id, vm.emitter_address), + PythError::InvalidGovernanceDataSource, + ); + + require( + vm.sequence > last_executed_governance_sequence(), + PythError::OldGovernanceMessage, + ); + + set_last_executed_governance_sequence(vm.sequence); + vm +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml new file mode 100644 index 0000000000..7cb44a4edc --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml @@ -0,0 +1,8 @@ +[project] +authors = ["Fuel Labs "] +entry = "interface.sw" +license = "Apache-2.0" +name = "pyth_interface" + +[dependencies] +standards = { git = "https://github.com/FuelLabs/sway-standards", tag = "v0.4.4" } diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw new file mode 100644 index 0000000000..4e7d6ddb91 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw @@ -0,0 +1,11 @@ +library; + +// The order of the modules is important because of the dependencies between them. 
+pub mod data_source; +pub mod wormhole_light; +pub mod price; +pub mod accumulator_update; +pub mod batch_attestation_update; +pub mod governance_payload; +pub mod governance_instruction; +pub mod update_type; diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw new file mode 100644 index 0000000000..014cee8975 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw @@ -0,0 +1,136 @@ +library; + +use ::errors::PythError; +use ::data_structures::{data_source::*, price::*, wormhole_light::{StorageGuardianSet, WormholeVM}}; +use std::{bytes::Bytes, hash::Hash}; + +pub struct AccumulatorUpdate { + data: Bytes, +} +const MINIMUM_ALLOWED_MINOR_VERSION = 0; +const MAJOR_VERSION = 1; +impl AccumulatorUpdate { + pub fn new(data: Bytes) -> Self { + Self { data } + } + pub fn total_updates(self, ref mut offset: u64) -> u64 { + let proof_size = u16::from_be_bytes([self.data.get(offset).unwrap(), self.data.get(offset + 1).unwrap()]).as_u64(); + offset += proof_size + 2; + self.data.get(offset).unwrap().as_u64() + } + pub fn verify(self) -> u64 { + // skip magic as already checked when this is called + let major_version = self.data.get(4); + require( + major_version + .is_some() && major_version + .unwrap() == MAJOR_VERSION, + PythError::InvalidMajorVersion, + ); + let minor_version = self.data.get(5); + require( + minor_version + .is_some() && minor_version + .unwrap() >= MINIMUM_ALLOWED_MINOR_VERSION, + PythError::InvalidMinorVersion, + ); + let trailing_header_size = self.data.get(6); + require(trailing_header_size.is_some(), PythError::InvalidHeaderSize); + // skip trailing headers and update type + let offset = 8 + trailing_header_size.unwrap().as_u64(); + require( + self.data + .len() >= offset, + PythError::InvalidUpdateDataLength, + ); + offset + } +} +impl AccumulatorUpdate { + #[storage(read)] + pub fn verify_and_parse( + self, + current_guardian_set_index: u32, + wormhole_guardian_sets: StorageKey>, + is_valid_data_source: StorageKey>, +) -> (u64, Bytes, u64, Bytes) { + let encoded_offset = self.verify(); + let (_, slice) = self.data.split_at(encoded_offset); + let (encoded_slice, _) = slice.split_at(self.data.len() - encoded_offset); + let mut offset = 0; + let wormhole_proof_size = u16::from_be_bytes([encoded_slice.get(offset).unwrap(), encoded_slice.get(offset + 1).unwrap()]).as_u64(); + offset += 2; + let (_, slice) = encoded_slice.split_at(offset); + let (encoded_vm, _) = slice.split_at(wormhole_proof_size); + let vm = WormholeVM::parse_and_verify_pyth_vm( + current_guardian_set_index, + encoded_vm, + wormhole_guardian_sets, + is_valid_data_source, + ); + offset += wormhole_proof_size; + let encoded_payload = vm.payload; + /* + Payload offset: + skip magic (4 bytes) as already checked when this is called + skip update_type as (1 byte) it can only be WormholeMerkle + skip slot (8 bytes) as unused + skip ring_size (4 bytes) as unused + */ + let mut payload_offset = 17; + let (_, slice) = encoded_payload.split_at(payload_offset); + let (digest, _) = slice.split_at(20); + payload_offset += 20; + require( + payload_offset <= encoded_payload + .len(), + PythError::InvalidPayloadLength, + ); + let number_of_updates = encoded_slice.get(offset); + require( + number_of_updates + .is_some(), + PythError::NumberOfUpdatesIrretrievable, + ); + offset += 1; + 
(offset, digest, number_of_updates.unwrap().as_u64(), encoded_slice) + } +} +impl AccumulatorUpdate { + #[storage(read, write)] + pub fn update_price_feeds( + self, + current_guardian_set_index: u32, + wormhole_guardian_sets: StorageKey>, + latest_price_feed: StorageKey>, + is_valid_data_source: StorageKey>, +) -> (u64, Vec) { + let (mut offset, digest, number_of_updates, encoded_data) = self.verify_and_parse( + current_guardian_set_index, + wormhole_guardian_sets, + is_valid_data_source, + ); + + let mut updated_ids = Vec::new(); + let mut i = 0; + while i < number_of_updates { + let (new_offset, price_feed) = PriceFeed::extract_from_merkle_proof(digest, encoded_data, offset); + offset = new_offset; + let latest_publish_time = match latest_price_feed.get(price_feed.id).try_read() { + Some(price_feed) => price_feed.price.publish_time, + None => 0, + }; + if price_feed.price.publish_time > latest_publish_time { + latest_price_feed.insert(price_feed.id, price_feed); + updated_ids.push(price_feed.id); + } + i += 1; + } + require( + offset == encoded_data + .len(), + PythError::InvalidUpdateDataLength, + ); + (number_of_updates, updated_ids) + } +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw new file mode 100644 index 0000000000..f1ef0a21b8 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw @@ -0,0 +1,94 @@ +library; + +use ::errors::PythError; +use ::data_structures::{data_source::*, price::*, wormhole_light::{StorageGuardianSet, WormholeVM}}; +use std::{bytes::Bytes, hash::Hash}; + +const BATCH_MAGIC: u32 = 0x50325748; + +pub struct BatchAttestationUpdate { + pub data: Bytes, +} +impl BatchAttestationUpdate { + pub fn new(data: Bytes) -> Self { + Self { data } + } + #[storage(read, write)] + pub fn update_price_feeds( + self, + current_guardian_set_index: u32, + wormhole_guardian_sets: StorageKey>, + latest_price_feed: StorageKey>, + is_valid_data_source: StorageKey>, +) -> Vec { + let vm = WormholeVM::parse_and_verify_pyth_vm( + current_guardian_set_index, + self.data, + wormhole_guardian_sets, + is_valid_data_source, + ); + let (mut attestation_index, number_of_attestations, attestation_size) = parse_and_verify_batch_attestation_header(vm.payload); + let mut updated_ids = Vec::new(); + let mut i: u16 = 0; + while i < number_of_attestations { + let price_feed = PriceFeed::parse_attestation(attestation_size, vm.payload, attestation_index); + // Respect specified attestation size for forward-compatibility + attestation_index += attestation_size.as_u64(); + let latest_publish_time = match latest_price_feed.get(price_feed.id).try_read() { + Some(price_feed) => price_feed.price.publish_time, + None => 0, + }; + if price_feed.price.publish_time > latest_publish_time { + latest_price_feed.insert(price_feed.id, price_feed); + updated_ids.push(price_feed.id); + } + i += 1; + } + updated_ids + } +} +pub fn parse_and_verify_batch_attestation_header(encoded_payload: Bytes) -> (u64, u16, u16) { + let mut index = 0; + //Check header + let magic = u32::from_be_bytes([ + encoded_payload.get(index).unwrap(), + encoded_payload.get(index + 1).unwrap(), + encoded_payload.get(index + 2).unwrap(), + encoded_payload.get(index + 3).unwrap(), + ]); + require(magic == BATCH_MAGIC, PythError::InvalidMagic); + index += 4; + let major_version = 
u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); + require(major_version == 3, PythError::InvalidMajorVersion); + // addtionally skip minor_version(2 bytes) as unused + index += 4; + let header_size = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); + index += 2; + // From solidity impl: + // NOTE(2022-04-19): Currently, only payloadId comes after + // hdrSize. Future extra header fields must be read using a + // separate offset to respect hdrSize, i.e.: + // uint hdrIndex = 0; + // bpa.header.payloadId = UnsafeBytesLib.toUint8(encoded, index + hdrIndex); + // hdrIndex += 1; + // bpa.header.someNewField = UnsafeBytesLib.toUint32(encoded, index + hdrIndex); + // hdrIndex += 4; + // Skip remaining unknown header bytes + // index += bpa.header.hdrSize; + let payload_id = encoded_payload.get(index).unwrap(); + // Payload ID of 2 required for batch header + require(payload_id == 2, PythError::InvalidPayloadId); + // Skip remaining unknown header bytes + index += header_size.as_u64(); + let number_of_attestations = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); + index += 2; + let attestation_size = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); + index += 2; + require( + encoded_payload + .len() == index + (attestation_size * number_of_attestations) + .as_u64(), + PythError::InvalidPayloadLength, + ); + return (index, number_of_attestations, attestation_size); +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw new file mode 100644 index 0000000000..2cb65cd0b1 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw @@ -0,0 +1,39 @@ +library; + +use std::hash::{Hash, Hasher}; + +pub struct DataSource { + pub chain_id: u16, + pub emitter_address: b256, +} + +impl Hash for DataSource { + fn hash(self, ref mut state: Hasher) { + self.chain_id.hash(state); + self.emitter_address.hash(state); + } +} + +impl DataSource { + pub fn new(chain_id: u16, emitter_address: b256) -> Self { + Self { + chain_id, + emitter_address, + } + } + + #[storage(read)] + pub fn is_valid_data_source( + self, + is_valid_data_source: StorageKey>, +) -> bool { + match is_valid_data_source.get(self).try_read() { + Some(bool) => bool, + None => false, + } + } + + pub fn is_valid_governance_data_source(self, chain_id: u16, emitter_address: b256) -> bool { + self.chain_id == chain_id && self.emitter_address == emitter_address + } +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw new file mode 100644 index 0000000000..645b440bed --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw @@ -0,0 +1,242 @@ +library; + +use ::errors::PythError; +use ::data_structures::{ + data_source::*, + governance_payload::*, + price::*, + wormhole_light::{ + StorageGuardianSet, + WormholeVM, + }, +}; +use std::{bytes::Bytes, hash::Hash}; +use std::math::*; +use std::primitive_conversions::{u32::*, u64::*}; + +pub const MAGIC: u32 = 0x5054474d; + +pub struct 
GovernanceInstruction { + pub magic: u32, + pub module: GovernanceModule, + pub action: GovernanceAction, + pub target_chain_id: u16, + pub payload: Bytes, +} + +pub enum GovernanceModule { + Executor: (), // 0 + Target: (), // 1 + EvmExecutor: (), // 2 + StacksTarget: (), // 3 + Invalid: (), +} + +pub enum GovernanceAction { + UpgradeContract: (), // 0 + AuthorizeGovernanceDataSourceTransfer: (), // 1 + SetDataSources: (), // 2 + SetFee: (), // 3 + SetValidPeriod: (), // 4 + RequestGovernanceDataSourceTransfer: (), // 5 + Invalid: (), +} + +impl GovernanceInstruction { + pub fn new( + magic: u32, + module: GovernanceModule, + action: GovernanceAction, + target_chain_id: u16, + payload: Bytes, + ) -> Self { + Self { + magic, + module, + action, + target_chain_id, + payload, + } + } + + pub fn parse_governance_instruction(encoded_instruction: Bytes) -> Self { + let mut index = 0; + let magic = u32::from_be_bytes([ + encoded_instruction.get(index).unwrap(), + encoded_instruction.get(index + 1).unwrap(), + encoded_instruction.get(index + 2).unwrap(), + encoded_instruction.get(index + 3).unwrap(), + ]); + require(magic == MAGIC, PythError::InvalidMagic); + index += 4; + + let mod_number = encoded_instruction.get(index).unwrap(); + let module = match mod_number { + 0 => GovernanceModule::Executor, + 1 => GovernanceModule::Target, + 2 => GovernanceModule::EvmExecutor, + 3 => GovernanceModule::StacksTarget, + _ => GovernanceModule::Invalid, + }; + require( + match module { + GovernanceModule::Target => true, + _ => false, + }, + PythError::InvalidGovernanceTarget, + ); + index += 1; + + let action_number = encoded_instruction.get(index).unwrap(); + let governance_action = match action_number { + 0 => GovernanceAction::UpgradeContract, // Not implemented + 1 => GovernanceAction::AuthorizeGovernanceDataSourceTransfer, + 2 => GovernanceAction::SetDataSources, + 3 => GovernanceAction::SetFee, + 4 => GovernanceAction::SetValidPeriod, + 5 => GovernanceAction::RequestGovernanceDataSourceTransfer, + _ => GovernanceAction::Invalid, + }; + require( + match governance_action { + GovernanceAction::Invalid => false, + _ => true, + }, + PythError::InvalidGovernanceAction, + ); + index += 1; + + let target_chain_id = u16::from_be_bytes([ + encoded_instruction.get(index).unwrap(), + encoded_instruction.get(index + 1).unwrap(), + ]); + index += 2; + + let (_, payload) = encoded_instruction.split_at(index); + + GovernanceInstruction::new(magic, module, governance_action, target_chain_id, payload) + } + + /// Parse an AuthorizeGovernanceDataSourceTransferPayload (action 2) with minimal validation + pub fn parse_authorize_governance_data_source_transfer_payload( + encoded_payload: Bytes, + ) -> AuthorizeGovernanceDataSourceTransferPayload { + AuthorizeGovernanceDataSourceTransferPayload { + claim_vaa: encoded_payload, + } + } + + pub fn parse_request_governance_data_source_transfer_payload( + encoded_payload: Bytes, + ) -> RequestGovernanceDataSourceTransferPayload { + let mut index = 0; + let governance_data_source_index = u32::from_be_bytes([ + encoded_payload.get(index).unwrap(), + encoded_payload.get(index + 1).unwrap(), + encoded_payload.get(index + 2).unwrap(), + encoded_payload.get(index + 3).unwrap(), + ]); + index += 4; + require( + index == encoded_payload + .len(), + PythError::InvalidGovernanceMessage, + ); + let rdgst = RequestGovernanceDataSourceTransferPayload { + governance_data_source_index, + }; + rdgst + } + + pub fn parse_set_data_sources_payload(encoded_payload: Bytes) -> SetDataSourcesPayload { 
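+        // Editor's note (annotation, not part of the vendored Pyth source): the
+        // SetDataSources payload parsed below is laid out as one count byte followed
+        // by `count` entries of [2-byte chain_id][32-byte emitter_address], with no
+        // trailing bytes allowed (the final `require` checks index == payload length).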
+ let mut index = 0; + let data_sources_length = encoded_payload.get(index).unwrap().as_u64(); + index += 1; + let mut data_sources = Vec::with_capacity(data_sources_length); + + let mut i = 0; + while i < data_sources_length { + let (_, slice) = encoded_payload.split_at(index); + let (slice, _) = slice.split_at(2); + let chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]); + index += 2; + let (_, slice) = encoded_payload.split_at(index); + let (slice, _) = slice.split_at(32); + let emitter_address: b256 = slice.into(); + index += 32; + + data_sources.push(DataSource { + chain_id, + emitter_address, + }); + i += 1 + } + + require( + index == encoded_payload + .len(), + PythError::InvalidGovernanceMessage, + ); + let sds = SetDataSourcesPayload { data_sources }; + sds + } + + pub fn parse_set_fee_payload(encoded_payload: Bytes) -> SetFeePayload { + let mut index = 0; + let val = u64::from_be_bytes([ + encoded_payload.get(index).unwrap(), + encoded_payload.get(index + 1).unwrap(), + encoded_payload.get(index + 2).unwrap(), + encoded_payload.get(index + 3).unwrap(), + encoded_payload.get(index + 4).unwrap(), + encoded_payload.get(index + 5).unwrap(), + encoded_payload.get(index + 6).unwrap(), + encoded_payload.get(index + 7).unwrap(), + ]); + index += 8; + let expo = u64::from_be_bytes([ + encoded_payload.get(index).unwrap(), + encoded_payload.get(index + 1).unwrap(), + encoded_payload.get(index + 2).unwrap(), + encoded_payload.get(index + 3).unwrap(), + encoded_payload.get(index + 4).unwrap(), + encoded_payload.get(index + 5).unwrap(), + encoded_payload.get(index + 6).unwrap(), + encoded_payload.get(index + 7).unwrap(), + ]); + index += 8; + require( + encoded_payload + .len() == index, + PythError::InvalidGovernanceMessage, + ); + let sf = SetFeePayload { + new_fee: val * 10u64.pow(expo.try_as_u32().unwrap()), + }; + sf + } + + pub fn parse_set_valid_period_payload(encoded_payload: Bytes) -> SetValidPeriodPayload { + let mut index = 0; + let valid_time_period_seconds = u64::from_be_bytes([ + encoded_payload.get(index).unwrap(), + encoded_payload.get(index + 1).unwrap(), + encoded_payload.get(index + 2).unwrap(), + encoded_payload.get(index + 3).unwrap(), + encoded_payload.get(index + 4).unwrap(), + encoded_payload.get(index + 5).unwrap(), + encoded_payload.get(index + 6).unwrap(), + encoded_payload.get(index + 7).unwrap(), + ]); + index += 8; + require( + index == encoded_payload + .len(), + PythError::InvalidGovernanceMessage, + ); + let svp = SetValidPeriodPayload { + new_valid_period: valid_time_period_seconds, + }; + svp + } +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw new file mode 100644 index 0000000000..c00838a915 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw @@ -0,0 +1,29 @@ +library; + +use std::bytes::Bytes; + +use ::data_structures::data_source::DataSource; + +pub struct UpgradeContractPayload { + pub new_implementation: Identity, +} + +pub struct AuthorizeGovernanceDataSourceTransferPayload { + pub claim_vaa: Bytes, +} + +pub struct RequestGovernanceDataSourceTransferPayload { + pub governance_data_source_index: u32, +} + +pub struct SetDataSourcesPayload { + pub data_sources: Vec, +} + +pub struct SetFeePayload { + pub new_fee: u64, +} + +pub struct SetValidPeriodPayload { + pub 
new_valid_period: u64, +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw new file mode 100644 index 0000000000..d3d619d751 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw @@ -0,0 +1,343 @@ +library; + +use std::{block::timestamp, bytes::Bytes}; + +use ::errors::PythError; +use ::utils::absolute_of_exponent; +use ::data_structures::wormhole_light::WormholeVM; +use ::pyth_merkle_proof::validate_proof; +const TAI64_DIFFERENCE = 4611686018427387904; +// A price with a degree of uncertainty, represented as a price +- a confidence interval. +// +// The confidence interval roughly corresponds to the standard error of a normal distribution. +// Both the price and confidence are stored in a fixed-point numeric representation, +// `x * (10^expo)`, where `expo` is the exponent. +// +// Please refer to the documentation at https://docs.pyth.network/documentation/pythnet-price-feeds/best-practices for how +// to how this price safely. +pub struct Price { + // Confidence interval around the price + pub confidence: u64, + // Price exponent + // This value represents the absolute value of an i32 in the range -255 to 0. Values other than 0, should be considered negative: + // exponent of 5 means the Pyth Price exponent was -5 + pub exponent: u32, + // Price + pub price: u64, + // The TAI64 timestamp describing when the price was published + pub publish_time: u64, +} +impl Price { + pub fn new( + confidence: u64, + exponent: u32, + price: u64, + publish_time: u64, + ) -> Self { + Self { + confidence, + exponent, + price, + publish_time, + } + } +} +// The `PriceFeedId` type is an alias for `b256` that represents the id for a specific Pyth price feed. +pub type PriceFeedId = b256; +// PriceFeed represents a current aggregate price from Pyth publisher feeds. +pub struct PriceFeed { + // Latest available exponentially-weighted moving average price + pub ema_price: Price, + // The price ID. 
+ pub id: PriceFeedId, + // Latest available price + pub price: Price, +} +impl PriceFeedId { + pub fn is_target(self, target_price_feed_ids: Vec) -> bool { + let mut i = 0; + while i < target_price_feed_ids.len() { + if target_price_feed_ids.get(i).unwrap() == self { + return true; + } + i += 1; + } + false + } + pub fn is_contained_within(self, output_price_feeds: Vec) -> bool { + let mut i = 0; + while i < output_price_feeds.len() { + if output_price_feeds.get(i).unwrap().id == self { + return true; + } + i += 1; + } + false + } +} +impl PriceFeed { + pub fn new(ema_price: Price, id: PriceFeedId, price: Price) -> Self { + Self { + ema_price, + id, + price, + } + } +} +impl PriceFeed { + pub fn parse_message(encoded_price_feed: Bytes) -> Self { + let mut offset = 1u64; + let (_, slice) = encoded_price_feed.split_at(offset); + let (price_feed_id, _) = slice.split_at(32); + let price_feed_id: PriceFeedId = price_feed_id.into(); + offset += 32; + let price = u64::from_be_bytes([ + encoded_price_feed.get(offset).unwrap(), + encoded_price_feed.get(offset + 1).unwrap(), + encoded_price_feed.get(offset + 2).unwrap(), + encoded_price_feed.get(offset + 3).unwrap(), + encoded_price_feed.get(offset + 4).unwrap(), + encoded_price_feed.get(offset + 5).unwrap(), + encoded_price_feed.get(offset + 6).unwrap(), + encoded_price_feed.get(offset + 7).unwrap(), + ]); + offset += 8; + let confidence = u64::from_be_bytes([ + encoded_price_feed.get(offset).unwrap(), + encoded_price_feed.get(offset + 1).unwrap(), + encoded_price_feed.get(offset + 2).unwrap(), + encoded_price_feed.get(offset + 3).unwrap(), + encoded_price_feed.get(offset + 4).unwrap(), + encoded_price_feed.get(offset + 5).unwrap(), + encoded_price_feed.get(offset + 6).unwrap(), + encoded_price_feed.get(offset + 7).unwrap(), + ]); + offset += 8; + // exponent is an i32, expected to be in the range -255 to 0 + let exponent = u32::from_be_bytes([ + encoded_price_feed.get(offset).unwrap(), + encoded_price_feed.get(offset + 1).unwrap(), + encoded_price_feed.get(offset + 2).unwrap(), + encoded_price_feed.get(offset + 3).unwrap(), + ]); + let exponent = absolute_of_exponent(exponent); + require(exponent < 256u32, PythError::InvalidExponent); + offset += 4; + let mut publish_time = u64::from_be_bytes([ + encoded_price_feed.get(offset).unwrap(), + encoded_price_feed.get(offset + 1).unwrap(), + encoded_price_feed.get(offset + 2).unwrap(), + encoded_price_feed.get(offset + 3).unwrap(), + encoded_price_feed.get(offset + 4).unwrap(), + encoded_price_feed.get(offset + 5).unwrap(), + encoded_price_feed.get(offset + 6).unwrap(), + encoded_price_feed.get(offset + 7).unwrap(), + ]); + // skip unused previous_publish_times (8 bytes) + offset += 16; + let ema_price = u64::from_be_bytes([ + encoded_price_feed.get(offset).unwrap(), + encoded_price_feed.get(offset + 1).unwrap(), + encoded_price_feed.get(offset + 2).unwrap(), + encoded_price_feed.get(offset + 3).unwrap(), + encoded_price_feed.get(offset + 4).unwrap(), + encoded_price_feed.get(offset + 5).unwrap(), + encoded_price_feed.get(offset + 6).unwrap(), + encoded_price_feed.get(offset + 7).unwrap(), + ]); + offset += 8; + let ema_confidence = u64::from_be_bytes([ + encoded_price_feed.get(offset).unwrap(), + encoded_price_feed.get(offset + 1).unwrap(), + encoded_price_feed.get(offset + 2).unwrap(), + encoded_price_feed.get(offset + 3).unwrap(), + encoded_price_feed.get(offset + 4).unwrap(), + encoded_price_feed.get(offset + 5).unwrap(), + encoded_price_feed.get(offset + 6).unwrap(), + encoded_price_feed.get(offset 
+ 7).unwrap(), + ]); + offset += 8; + require( + offset <= encoded_price_feed + .len(), + PythError::InvalidPriceFeedDataLength, + ); + //convert publish_time from UNIX to TAI64 + publish_time += TAI64_DIFFERENCE; + require( + publish_time <= timestamp(), + PythError::FuturePriceNotAllowed, + ); + PriceFeed::new( + Price::new(ema_confidence, exponent, ema_price, publish_time), + price_feed_id, + Price::new(confidence, exponent, price, publish_time), + ) + } + pub fn parse_attestation(attestation_size: u16, encoded_payload: Bytes, index: u64) -> Self { + // Skip product id (32 bytes) as unused + let mut attestation_index = index + 32; + let (_, slice) = encoded_payload.split_at(attestation_index); + let (price_feed_id, _) = slice.split_at(32); + let price_feed_id: PriceFeedId = price_feed_id.into(); + attestation_index += 32; + let mut price = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + let mut confidence = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + // exponent is an i32, expected to be in the range -255 to 0 + let exponent = u32::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + ]); + let exponent = absolute_of_exponent(exponent); + require(exponent < 256u32, PythError::InvalidExponent); + attestation_index += 4; + let ema_price = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + let ema_confidence = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + // Status is an enum (encoded as u8) with the following values: + // 0 = UNKNOWN: The price feed is not currently updating for an unknown reason. + // 1 = TRADING: The price feed is updating as expected. 
+ // 2 = HALTED: The price feed is not currently updating because trading in the product has been halted. + // 3 = AUCTION: The price feed is not currently updating because an auction is setting the price. + let status = encoded_payload.get(attestation_index).unwrap(); + // Additionally skip number_of publishers (8 bytes) and attestation_time (8 bytes); as unused + attestation_index += 17; + let mut publish_time = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + if status == 1u8 { + attestation_index += 24; + } else { + // If status is not trading then the latest available price is + // the previous price that is parsed here. + + // previous publish time + publish_time = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + // previous price + price = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + // previous confidence + confidence = u64::from_be_bytes([ + encoded_payload.get(attestation_index).unwrap(), + encoded_payload.get(attestation_index + 1).unwrap(), + encoded_payload.get(attestation_index + 2).unwrap(), + encoded_payload.get(attestation_index + 3).unwrap(), + encoded_payload.get(attestation_index + 4).unwrap(), + encoded_payload.get(attestation_index + 5).unwrap(), + encoded_payload.get(attestation_index + 6).unwrap(), + encoded_payload.get(attestation_index + 7).unwrap(), + ]); + attestation_index += 8; + } + require( + (attestation_index - index) <= attestation_size + .as_u64(), + PythError::InvalidAttestationSize, + ); + //convert publish_time from UNIX to TAI64 + publish_time += TAI64_DIFFERENCE; + PriceFeed::new( + Price::new(ema_confidence, exponent, ema_price, publish_time), + price_feed_id, + Price::new(confidence, exponent, price, publish_time), + ) + } +} +impl PriceFeed { + pub fn extract_from_merkle_proof(digest: Bytes, encoded_proof: Bytes, offset: u64) -> (u64, self) { + // In order to avoid `ref mut` param related MemoryWriteOverlap error + let mut current_offset = offset; + let message_size = u16::from_be_bytes([ + encoded_proof.get(current_offset).unwrap(), + encoded_proof.get(current_offset + 1).unwrap(), + ]).as_u64(); + current_offset += 2; + let (_, slice) = encoded_proof.split_at(current_offset); + let (encoded_message, _) = slice.split_at(message_size); + 
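+        // Editor's note (annotation, not part of the vendored Pyth source): each update
+        // is wrapped as [2-byte message size][message][1-byte proof length][20-byte
+        // sibling digests...]. `validate_proof` below re-derives the root from keccak256
+        // hashes truncated to 20 bytes and compares it against `digest` before the
+        // message is parsed as a price feed.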
current_offset += message_size; + let end_offset = validate_proof( + encoded_proof, + current_offset, + digest, + encoded_message + .clone(), + ); + // Message type of 0 is a Price Feed + require( + encoded_message + .get(0) + .unwrap() == 0, + PythError::IncorrectMessageType, + ); + let price_feed = PriceFeed::parse_message(encoded_message); + (end_offset, price_feed) + } +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw new file mode 100644 index 0000000000..bf3706275c --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw @@ -0,0 +1,38 @@ +library; + +use std::{array_conversions::u32::*, bytes::Bytes}; + +use ::data_structures::{ + accumulator_update::AccumulatorUpdate, + batch_attestation_update::BatchAttestationUpdate, +}; + +const ACCUMULATOR_MAGIC: u32 = 0x504e4155; + +pub enum UpdateType { + Accumulator: AccumulatorUpdate, + BatchAttestation: BatchAttestationUpdate, +} + +impl UpdateType { + pub fn determine_type(data: Bytes) -> Self { + let (magic, _) = data.split_at(4); //TODO: Convert to u32 for comparison with const ACCUMULATOR_MAGIC. Use raw_ptr.read::()? Remove accumulator_magic_bytes() + if data.len() > 4 && magic == accumulator_magic_bytes() { + UpdateType::Accumulator(AccumulatorUpdate::new(data)) + } else { + UpdateType::BatchAttestation((BatchAttestationUpdate::new(data))) + } + } +} + +pub fn accumulator_magic_bytes() -> Bytes { + let accumulator_magic_array = ACCUMULATOR_MAGIC.to_be_bytes(); + + let mut accumulator_magic_bytes = Bytes::with_capacity(4); + accumulator_magic_bytes.push(accumulator_magic_array[0]); + accumulator_magic_bytes.push(accumulator_magic_array[1]); + accumulator_magic_bytes.push(accumulator_magic_array[2]); + accumulator_magic_bytes.push(accumulator_magic_array[3]); + + accumulator_magic_bytes +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw new file mode 100644 index 0000000000..848ff66f35 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw @@ -0,0 +1,589 @@ +library; + +use ::data_structures::data_source::*; +use ::errors::WormholeError; + +use std::{ + array_conversions::{ + b256::*, + u16::*, + u32::*, + }, + b512::B512, + block::timestamp, + bytes::Bytes, + constants::ZERO_B256, + hash::{ + Hash, + keccak256, + sha256, + }, + storage::storage_vec::*, + vm::evm::ecr::ec_recover_evm_address, +}; + +pub const UPGRADE_MODULE: b256 = 0x00000000000000000000000000000000000000000000000000000000436f7265; + +pub struct GuardianSet { + pub expiration_time: u64, + pub keys: Vec, +} + +impl GuardianSet { + #[storage(read)] + pub fn from_stored(stored: StorageGuardianSet) -> Self { + Self { + expiration_time: stored.expiration_time, + keys: stored.keys.load_vec(), + } + } +} + +pub struct StorageGuardianSet { + pub expiration_time: u64, + pub keys: StorageKey>, +} + +impl StorageGuardianSet { + pub fn new(expiration_time: u64, keys: StorageKey>) -> Self { + StorageGuardianSet { + expiration_time, + keys, + } + } +} + +pub struct GuardianSetUpgrade { + pub action: u8, + pub chain: u16, + pub module: b256, + pub new_guardian_set: StorageGuardianSet, + pub new_guardian_set_index: u32, +} + +impl 
GuardianSetUpgrade { + pub fn new( + action: u8, + chain: u16, + module: b256, + new_guardian_set: StorageGuardianSet, + new_guardian_set_index: u32, + ) -> Self { + GuardianSetUpgrade { + action, + chain, + module, + new_guardian_set, + new_guardian_set_index, + } + } +} + +impl GuardianSetUpgrade { + #[storage(read, write)] + pub fn parse_encoded_upgrade(current_guardian_set_index: u32, encoded_upgrade: Bytes) -> Self { + let mut index = 0; + let (_, slice) = encoded_upgrade.split_at(index); + let (module, _) = slice.split_at(32); + let module: b256 = module.into(); + require(module == UPGRADE_MODULE, WormholeError::InvalidModule); + index += 32; + let action = encoded_upgrade.get(index).unwrap(); + require(action == 2, WormholeError::InvalidGovernanceAction); + index += 1; + let chain = u16::from_be_bytes([encoded_upgrade.get(index).unwrap(), encoded_upgrade.get(index + 1).unwrap()]); + index += 2; + let new_guardian_set_index = u32::from_be_bytes([ + encoded_upgrade.get(index).unwrap(), + encoded_upgrade.get(index + 1).unwrap(), + encoded_upgrade.get(index + 2).unwrap(), + encoded_upgrade.get(index + 3).unwrap(), + ]); + require( + new_guardian_set_index > current_guardian_set_index, + WormholeError::NewGuardianSetIndexIsInvalid, + ); + index += 4; + let guardian_length = encoded_upgrade.get(index).unwrap(); + index += 1; + let mut new_guardian_set: StorageGuardianSet = StorageGuardianSet::new( + 0, + StorageKey::>::new( + ZERO_B256, + 0, + sha256(("guardian_set_keys", new_guardian_set_index)), + ), + ); + let mut i: u8 = 0; + while i < guardian_length { + let (_, slice) = encoded_upgrade.split_at(index); + let (key, _) = slice.split_at(20); + let key: b256 = key.into(); + new_guardian_set.keys.push(key.rsh(96)); + index += 20; + i += 1; + } + require( + new_guardian_set + .keys + .len() == guardian_length + .as_u64(), + WormholeError::GuardianSetKeysLengthNotEqual, + ); + require( + encoded_upgrade + .len() == index, + WormholeError::InvalidGuardianSetUpgradeLength, + ); + GuardianSetUpgrade::new( + action, + chain, + module, + new_guardian_set, + new_guardian_set_index, + ) + } +} + +pub struct GuardianSignature { + guardian_index: u8, + r: b256, + s: b256, + v: u8, +} + +impl GuardianSignature { + pub fn new(guardian_index: u8, r: b256, s: b256, v: u8) -> Self { + GuardianSignature { + guardian_index, + r, + s, + v, + } + } + // eip-2098: Compact Signature Representation + pub fn compact(self) -> B512 { + let y_parity = b256::from_be_bytes([ + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + 0u8, + self.v - 27u8, + ]); + let shifted_y_parity = y_parity.lsh(255); + let y_parity_and_s = b256::binary_or(shifted_y_parity, self.s); + B512::from((self.r, y_parity_and_s)) + } +} + +impl GuardianSignature { + pub fn verify( + self, + guardian_set_key: b256, + hash: b256, + index: u64, + last_index: u64, +) { + // Ensure that provided signature indices are ascending only + if index > 0 { + require( + self.guardian_index + .as_u64() > last_index, + WormholeError::SignatureIndicesNotAscending, + ); + } + let recovered_signer = ec_recover_evm_address(self.compact(), hash); + require( + recovered_signer + .is_ok() && recovered_signer + .unwrap() + .bits() == guardian_set_key, + WormholeError::SignatureInvalid, + ); + } +} + +pub struct WormholeVM { + pub version: u8, + pub guardian_set_index: u32, + pub governance_action_hash: b256, + // 
signatures: Vec, // Shown here to represent data layout of VM, but not needed + pub timestamp: u32, + pub nonce: u32, + pub emitter_chain_id: u16, + pub emitter_address: b256, + pub sequence: u64, + pub consistency_level: u8, + pub payload: Bytes, +} + +impl WormholeVM { + pub fn default() -> Self { + WormholeVM { + version: 0u8, + guardian_set_index: 0u32, + governance_action_hash: ZERO_B256, + timestamp: 0u32, + nonce: 0u32, + emitter_chain_id: 0u16, + emitter_address: ZERO_B256, + sequence: 0u64, + consistency_level: 0u8, + payload: Bytes::new(), + } + } + pub fn new( + version: u8, + guardian_set_index: u32, + governance_action_hash: b256, + timestamp_: u32, + nonce: u32, + emitter_chain_id: u16, + emitter_address: b256, + sequence: u64, + consistency_level: u8, + payload: Bytes, + ) -> Self { + WormholeVM { + version, + guardian_set_index, + governance_action_hash, + timestamp: timestamp_, + nonce, + emitter_chain_id, + emitter_address, + sequence, + consistency_level, + payload, + } + } +} + +impl WormholeVM { + #[storage(read)] + pub fn parse_and_verify_wormhole_vm( + current_guardian_set_index: u32, + encoded_vm: Bytes, + wormhole_guardian_sets: StorageKey>, + ) -> Self { + let mut index = 0; + let version = encoded_vm.get(index); + require( + version + .is_some() && version + .unwrap() == 1, + WormholeError::VMVersionIncompatible, + ); + index += 1; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(4); //replace with slice() + let guardian_set_index = u32::from_be_bytes([ + //replace with func + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + ]); + index += 4; + let guardian_set = wormhole_guardian_sets.get(guardian_set_index).try_read(); + require(guardian_set.is_some(), WormholeError::GuardianSetNotFound); + let guardian_set = guardian_set.unwrap(); + require( + guardian_set + .keys + .len() > 0, + WormholeError::InvalidGuardianSetKeysLength, + ); + require( + guardian_set_index == current_guardian_set_index && (guardian_set + .expiration_time == 0 || guardian_set + .expiration_time > timestamp()), + WormholeError::InvalidGuardianSet, + ); + let signers_length = encoded_vm.get(index); + require( + signers_length + .is_some(), + WormholeError::SignersLengthIrretrievable, + ); + let signers_length = signers_length.unwrap().as_u64(); + index += 1; + // 66 is the length of each guardian signature + // 1 (guardianIndex) + 32 (r) + 32 (s) + 1 (v) + let hash_index = index + (signers_length * 66); + require( + hash_index < encoded_vm + .len(), + WormholeError::InvalidSignatureLength, + ); + let (_, slice) = encoded_vm.split_at(hash_index); + let hash = keccak256(keccak256(slice)); + let mut last_index = 0; + let mut i = 0; + while i < signers_length { + let guardian_index = encoded_vm.get(index); + require( + guardian_index + .is_some(), + WormholeError::GuardianIndexIrretrievable, + ); + let guardian_index = guardian_index.unwrap(); + index += 1; + let (_, slice) = encoded_vm.split_at(index); + let (slice, remainder) = slice.split_at(32); + let r: b256 = slice.into(); + index += 32; + let (slice, remainder) = remainder.split_at(32); + let s: b256 = slice.into(); + index += 32; + let v = remainder.get(0); + require(v.is_some(), WormholeError::SignatureVIrretrievable); + let v = v.unwrap() + 27; + index += 1; + let guardian_set_key = guardian_set.keys.get(guardian_index.as_u64()); + require( + guardian_set_key + .is_some(), + WormholeError::GuardianSetKeyIrretrievable, + ); + 
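+            // Editor's note (annotation, not part of the vendored Pyth source): the
+            // signature that follows is checked by recovering an EVM address from its
+            // EIP-2098 compact form and comparing it with the stored guardian key;
+            // signature indices must be strictly ascending. After the loop, quorum
+            // demands more than two thirds of the set, e.g. for 19 keys:
+            // ((19 * 10) / 3 * 2) / 10 + 1 = (63 * 2) / 10 + 1 = 13 signatures.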
GuardianSignature::new(guardian_index, r, s, v) + .verify(guardian_set_key.unwrap().read(), hash, i, last_index); + last_index = guardian_index.as_u64(); + i += 1; + } + /* + We're using a fixed point number transformation with 1 decimal to deal with rounding. + This quorum check is critical to assessing whether we have enough Guardian signatures to validate a VM. + If guardian set key length is 0 and signatures length is 0, this could compromise the integrity of both VM and signature verification. + */ + require( + ((((guardian_set + .keys + .len() * 10) / 3) * 2) / 10 + 1) <= signers_length, + WormholeError::NoQuorum, + ); + //ignore VM.signatures + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(4); + let _timestamp = u32::from_be_bytes([ + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + ]); + index += 4; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(4); + let nonce = u32::from_be_bytes([ + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + ]); + index += 4; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(2); + let emitter_chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]); + index += 2; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(32); + let emitter_address: b256 = slice.into(); + index += 32; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(8); + let sequence = u64::from_be_bytes([ + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + slice.get(4).unwrap(), + slice.get(5).unwrap(), + slice.get(6).unwrap(), + slice.get(7).unwrap(), + ]); + index += 8; + let consistency_level = encoded_vm.get(index); + require( + consistency_level + .is_some(), + WormholeError::ConsistencyLevelIrretrievable, + ); + index += 1; + require( + index <= encoded_vm + .len(), + WormholeError::InvalidPayloadLength, + ); + let (_, payload) = encoded_vm.split_at(index); + WormholeVM::new( + version + .unwrap(), + guardian_set_index, + hash, + _timestamp, + nonce, + emitter_chain_id, + emitter_address, + sequence, + consistency_level + .unwrap(), + payload, + ) + } + pub fn parse_initial_wormhole_vm(encoded_vm: Bytes) -> Self { + let mut index = 0; + let version = encoded_vm.get(index); + require( + version + .is_some() && version + .unwrap() == 1, + WormholeError::VMVersionIncompatible, + ); + index += 1; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(4); //replace with slice() + let guardian_set_index = u32::from_be_bytes([ + //replace with func + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + ]); + index += 4; + let signers_length = encoded_vm.get(index); + require( + signers_length + .is_some(), + WormholeError::SignersLengthIrretrievable, + ); + let signers_length = signers_length.unwrap().as_u64(); + index += 1; + // 66 is the length of each guardian signature + // 1 (guardianIndex) + 32 (r) + 32 (s) + 1 (v) + let hash_index = index + (signers_length * 66); + require( + hash_index < encoded_vm + .len(), + WormholeError::InvalidSignatureLength, + ); + let (_, slice) = encoded_vm.split_at(hash_index); + let hash = keccak256(keccak256(slice)); + // account for signatures + index += 66 * signers_length; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(4); + 
let timestamp_ = u32::from_be_bytes([ + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + ]); + index += 4; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(4); + let nonce = u32::from_be_bytes([ + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + ]); + index += 4; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(2); + let emitter_chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]); + index += 2; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(32); + let emitter_address: b256 = slice.into(); + index += 32; + let (_, slice) = encoded_vm.split_at(index); + let (slice, _) = slice.split_at(8); + let sequence = u64::from_be_bytes([ + slice.get(0).unwrap(), + slice.get(1).unwrap(), + slice.get(2).unwrap(), + slice.get(3).unwrap(), + slice.get(4).unwrap(), + slice.get(5).unwrap(), + slice.get(6).unwrap(), + slice.get(7).unwrap(), + ]); + index += 8; + let consistency_level = encoded_vm.get(index); + require( + consistency_level + .is_some(), + WormholeError::ConsistencyLevelIrretrievable, + ); + index += 1; + require( + index <= encoded_vm + .len(), + WormholeError::InvalidPayloadLength, + ); + let (_, payload) = encoded_vm.split_at(index); + WormholeVM::new( + version + .unwrap(), + guardian_set_index, + hash, + timestamp_, + nonce, + emitter_chain_id, + emitter_address, + sequence, + consistency_level + .unwrap(), + payload, + ) + } +} + +impl WormholeVM { + #[storage(read)] + pub fn parse_and_verify_pyth_vm( + current_guardian_set_index: u32, + encoded_vm: Bytes, + wormhole_guardian_sets: StorageKey>, + is_valid_data_source: StorageKey>, + ) -> Self { + let vm = WormholeVM::parse_and_verify_wormhole_vm( + current_guardian_set_index, + encoded_vm, + wormhole_guardian_sets, + ); + require( + DataSource::new(vm.emitter_chain_id, vm.emitter_address) + .is_valid_data_source(is_valid_data_source), + WormholeError::InvalidUpdateDataSource, + ); + vm + } +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw new file mode 100644 index 0000000000..ab74947ae8 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw @@ -0,0 +1,72 @@ +library; + +pub enum PythError { + FeesCanOnlyBePaidInTheBaseAsset: (), + FuturePriceNotAllowed: (), + GuardianSetNotFound: (), + IncorrectMessageType: (), + InsufficientFee: (), + InvalidArgument: (), + InvalidAttestationSize: (), + InvalidDataSourcesLength: (), + InvalidExponent: (), + InvalidGovernanceDataSource: (), + InvalidGovernanceAction: (), + InvalidGovernanceMessage: (), + InvalidGovernanceModule: (), + InvalidGovernanceTarget: (), + InvalidHeaderSize: (), + InvalidMagic: (), + InvalidMajorVersion: (), + InvalidMinorVersion: (), + InvalidPayloadId: (), + InvalidPayloadLength: (), + InvalidPriceFeedDataLength: (), + InvalidProof: (), + InvalidUpdateData: (), + InvalidUpdateDataLength: (), + InvalidUpdateDataSource: (), + InvalidUpgradeModule: (), + InvalidWormholeAddressToSet: (), + LengthOfPriceFeedIdsAndPublishTimesMustMatch: (), + NewGuardianSetIsEmpty: (), + NumberOfUpdatesIrretrievable: (), + OldGovernanceMessage: (), + /// Emitted when a Price's `publish_time` is stale. + OutdatedPrice: (), + /// Emitted when a PriceFeed could not be retrieved. 
+ PriceFeedNotFound: (), + PriceFeedNotFoundWithinRange: (), + WormholeGovernanceActionNotFound: (), +} + +pub enum WormholeError { + ConsistencyLevelIrretrievable: (), + GovernanceActionAlreadyConsumed: (), + GuardianIndexIrretrievable: (), + GuardianSetHasExpired: (), + GuardianSetKeyIrretrievable: (), + GuardianSetKeysLengthNotEqual: (), + GuardianSetNotFound: (), + InvalidGovernanceAction: (), + InvalidGovernanceChain: (), + InvalidGovernanceContract: (), + InvalidGuardianSet: (), + InvalidGuardianSetKeysLength: (), + InvalidGuardianSetUpgrade: (), + InvalidGuardianSetUpgradeLength: (), + InvalidModule: (), + InvalidPayloadLength: (), + InvalidSignatureLength: (), + InvalidUpdateDataSource: (), + NewGuardianSetIsEmpty: (), + NewGuardianSetIndexIsInvalid: (), + NoQuorum: (), + NotSignedByCurrentGuardianSet: (), + SignatureInvalid: (), + SignatureIndicesNotAscending: (), + SignatureVIrretrievable: (), + SignersLengthIrretrievable: (), + VMSignatureInvalid: (), + VMVersionIncompatible: (), +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw new file mode 100644 index 0000000000..4ccfecf2e4 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw @@ -0,0 +1,43 @@ +library; + +use ::data_structures::{data_source::DataSource, price::PriceFeedId,}; + +pub struct ConstructedEvent { + pub guardian_set_index: u32, +} + +pub struct NewGuardianSetEvent { + pub governance_action_hash: b256, + // new_guardian_set: GuardianSet, // TODO: Uncomment when SDK supports logs with nested Vecs https://github.com/FuelLabs/fuels-rs/issues/1046 + pub new_guardian_set_index: u32, +} + +pub struct UpdatedPriceFeedsEvent { + pub updated_price_feeds: Vec, +} + +pub struct ContractUpgradedEvent { + pub old_implementation: Identity, + pub new_implementation: Identity, +} + +pub struct GovernanceDataSourceSetEvent { + pub old_data_source: DataSource, + pub new_data_source: DataSource, + pub initial_sequence: u64, +} + +pub struct DataSourcesSetEvent { + pub old_data_sources: Vec, + pub new_data_sources: Vec, +} + +pub struct FeeSetEvent { + pub old_fee: u64, + pub new_fee: u64, +} + +pub struct ValidPeriodSetEvent { + pub old_valid_period: u64, + pub new_valid_period: u64, +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw new file mode 100644 index 0000000000..43ac8c334f --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw @@ -0,0 +1,323 @@ +library; + +// The order of the modules is important because of the dependencies between them. +pub mod pyth_merkle_proof; +pub mod errors; +pub mod utils; +pub mod events; +pub mod data_structures; + +use ::data_structures::{ + data_source::DataSource, + governance_payload::UpgradeContractPayload, + price::{ + Price, + PriceFeed, + PriceFeedId, + }, + wormhole_light::{ + GuardianSet, + }, +}; +use std::{bytes::Bytes, storage::storage_vec::*}; + +abi PythCore { + /// This function returns the exponentially-weighted moving average price and confidence interval. + /// + /// # Arguments + /// + /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. + /// + /// # Returns + /// + /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. 
+ /// + /// # Reverts + /// + /// * When the EMA price is not available. + #[storage(read)] + fn ema_price(price_feed_id: PriceFeedId) -> Price; + + /// This function Returns the exponentially-weighted moving average price that is no older than `time` seconds + /// from the current time. + /// + /// # Additional Information + /// + /// This function is a sanity-checked version of `ema_price_unsafe` which is useful in + /// applications that require a sufficiently-recent price. + /// + /// # Arguments + /// + /// * `time_period`: [u64] - The period (in seconds) that a price feed is considered valid since its publish time. + /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. + /// + /// # Returns + /// + /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. + /// + /// # Reverts + /// + /// * When the EMA price is not available. + /// * When the EMA price wasn't updated recently enough. + #[storage(read)] + fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price; + + /// This function returns the exponentially-weighted moving average price of a price feed without any sanity checks. + /// + /// # Additional Information + /// + /// This function returns the same price as `ema_price` in the case where the price is available. + /// However, if the price is not recent this function returns the latest available price. + /// + /// The returned price can be from arbitrarily far in the past; this function makes no guarantees that + /// the returned price is recent or useful for any particular application. + /// + /// Users of this function should check the `publish_time` in the `Price` to ensure that the returned price is + /// sufficiently recent for their application. If you are considering using this function, it may be + /// safer / easier to use either `ema_price` or `ema_price_no_older_than`. + /// + /// # Arguments + /// + /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. + /// + /// # Returns + /// + /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. + #[storage(read)] + fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price; + + /// This function parses `update_data` and returns price feeds of the given `price_feed_ids` if they are all published + /// within `min_publish_time` and `max_publish_time`. + /// + /// # Additional Information + /// + /// You can use this method if you want to use a Pyth price at a fixed time and not the most recent price; + /// otherwise, please consider using `update_price_feeds`. This method does not store the price updates on-chain. + /// + /// This method requires the caller to pay a fee in wei; the required fee can be computed by calling + /// `update_fee`. + /// + /// # Arguments + /// + /// * `max_publish_time`: [u64] - The maximum acceptable `publish_time` for the given `price_feed_ids`. + /// * `min_publish_time`: [u64] - The minimum acceptable `publish_time` for the given `price_feed_ids`. + /// * `price_feed_ids`: [Vec] - The ids of the price feeds to return PriceFeed data for. + /// * `update_data`: [Bytes] - The price update data. + /// + /// # Returns + /// + /// * [u64] - The number of hashes performed. 
+ /// + /// # Reverts + /// + /// * When the transferred fee is not sufficient + /// * When the update_data is invalid + /// * When there is no update for any of the given `priceIds` within the given time range. + #[storage(read), payable] + fn parse_price_feed_updates( + max_publish_time: u64, + min_publish_time: u64, + price_feed_ids: Vec, + update_data: Vec, + ) -> Vec; + + /// This function returns the price and confidence interval. + /// + /// # Additional Information + /// + /// This function also has some complex behaviours. + /// + /// # Arguments + /// + /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. + /// + /// # Returns + /// + /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. + /// + /// # Reverts + /// + /// * When the price has not been updated within the last valid time period. + #[storage(read)] + fn price(price_feed_id: PriceFeedId) -> Price; + + /// This function returns the price that is no older than `time` seconds of the current time. + /// + /// # Additional Information + /// + /// This function is a sanity-checked version of `price_unsafe` which is useful in applications that require a + /// sufficiently-recent price. Reverts if the price wasn't updated sufficiently recently. + /// + /// # Arguments + /// + /// * `time_period`: [u64] - The period (in seconds) that a price feed is considered valid since its publish time. + /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. + /// + /// # Returns + /// + /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. + /// + /// # Reverts + /// + /// * When the price is not available. + /// * When the price wasn't updated recently enough. + #[storage(read)] + fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price; + + /// This function returns the price of a price feed without any sanity checks. + /// + /// # Additional Information + /// + /// This function returns the most recent price update in this contract without any recency checks. + /// This function is unsafe as the returned price update may be arbitrarily far in the past. + /// + /// Users of this function should check the `publish_time` in the price to ensure that the returned price is + /// sufficiently recent for their application. If you are considering using this function, it may be + /// safer / easier to use either `getPrice` or `price_no_older_than`. + /// + /// # Arguments + /// + /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. + /// + /// # Returns + /// + /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. + #[storage(read)] + fn price_unsafe(price_feed_id: PriceFeedId) -> Price; + + /// This function returns the required fee in Wei to update an array of price updates. + /// + /// # Arguments + /// + /// * `update_data`: [Bytes] - The price update data. + /// + /// # Returns + /// + /// * [u64] - The required fee in Wei. + #[storage(read)] + fn update_fee(update_data: Vec) -> u64; + + /// This function updates price feeds with the given update messages. + /// + /// # Additional Information + /// + /// This function requires the caller to pay a fee in wei; the required fee can be computed by calling + /// `update_fee`. 
+ /// Prices will be updated if they are more recent than the current stored prices. + /// The call will succeed even if the update is not the most recent. + /// + /// # Arguments + /// + /// * `update_data`: [Bytes] - The price update data. + /// + /// # Reverts + /// + /// * When the transferred fee is not sufficient. + /// * When the `update_data` is invalid. + #[storage(read, write), payable] + fn update_price_feeds(update_data: Vec); + + /// This function is a wrapper around `update_price_feeds` that reverts fast if a price update is not necessary. + /// + /// # Additional Information + /// + /// A price update is necessary if the current on-chain `publish_time` is older than the given `publish_time`. It relies solely on the + /// given `publish_time` for the price feeds and does not read the actual price update publish time within `update_data`. + /// + /// This method requires the caller to pay a fee in wei; the required fee can be computed by calling + /// `update_fee`. + /// + /// `price_feed_ids` and `publish_times` are two arrays with the same size that correspond to senders known `publish_time` + /// of each PriceFeedId when calling this method. If all of price feeds within `price_feed_ids` have updated and have + /// a newer or equal publish time than the given publish time, it will reject the transaction to save gas. + /// Otherwise, it calls `update_price_feeds` to update the prices. + /// + /// # Arguments + /// + /// * `price_feed_ids`: [Vec] - Vector of price feed ids; `price_feed_ids[i]` corresponds to known price feed id of `publish_times[i]`. + /// * `publish_times`: [Vec] - Vector of publish times; `publish_times[i]` corresponds to known publish time of `price_feed_ids[i]`. + /// * `update_data`: [Bytes] - The price update data. + /// + /// + /// # Reverts + /// + /// * When update is not necessary. + /// * When the transferred fee is not sufficient. + /// * When the `update_data` is invalid. + #[storage(read, write), payable] + fn update_price_feeds_if_necessary( + price_feed_ids: Vec, + publish_times: Vec, + update_data: Vec, + ); + + /// This function returns the period (in seconds) that a price feed is considered valid since its publish time. + /// + /// # Returns + /// + /// * [u64] - The period (in seconds) that a price feed is considered valid since its publish time. + #[storage(read)] + fn valid_time_period() -> u64; +} + +abi PythInit { + #[storage(read, write)] + fn constructor( + data_sources: Vec, + governance_data_source: DataSource, + wormhole_governance_data_source: DataSource, + single_update_fee: u64, + valid_time_period_seconds: u64, + wormhole_guardian_set_addresses: Vec, + wormhole_guardian_set_index: u32, + chain_id: u16, + ); +} + +abi PythInfo { + #[storage(read)] + fn latest_publish_time(price_feed_id: PriceFeedId) -> u64; + + /// @notice Returns true if a price feed with the given id exists. + /// @param price_feed_id The Pyth Price Feed ID of which to check its existence. + #[storage(read)] + fn price_feed_exists(price_feed_id: PriceFeedId) -> bool; + + /// @notice Returns the price feed with given id. + /// @dev Reverts if the price does not exist. + /// @param price_feed_id The Pyth Price Feed ID of which to fetch the PriceFeed. 
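+    // Editor's sketch (assumed consumer code, not part of this fixture): a contract or
+    // script holding a deployed Pyth contract id could exercise the PythCore ABI above
+    // roughly as follows; PYTH_CONTRACT_ID and ETH_USD_FEED_ID are assumed constants.
+    //
+    //     let pyth = abi(PythCore, PYTH_CONTRACT_ID);
+    //     let fee = pyth.update_fee(update_data);
+    //     pyth.update_price_feeds{ coins: fee }(update_data); // fee paid in the base asset
+    //     let price = pyth.price_no_older_than(60, ETH_USD_FEED_ID);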
+ #[storage(read)] + fn price_feed_unsafe(price_feed_id: PriceFeedId) -> PriceFeed; + + #[storage(read)] + fn single_update_fee() -> u64; + + #[storage(read)] + fn is_valid_data_source(data_source: DataSource) -> bool; + + #[storage(read)] + fn valid_data_sources() -> Vec; + + #[storage(read)] + fn last_executed_governance_sequence() -> u64; + + #[storage(read)] + fn chain_id() -> u16; +} + +abi WormholeGuardians { + #[storage(read)] + fn current_guardian_set_index() -> u32; + + #[storage(read)] + fn current_wormhole_provider() -> DataSource; + + #[storage(read)] + fn governance_action_is_consumed(hash: b256) -> bool; + + #[storage(read)] + fn guardian_set(index: u32) -> GuardianSet; + + #[storage(read, write)] + fn submit_new_guardian_set(vm: Bytes); +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw new file mode 100644 index 0000000000..a26f91fcb0 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw @@ -0,0 +1,63 @@ +library; + +use std::{bytes::Bytes, hash::{Hash, keccak256}}; +use ::errors::PythError; + +pub const MERKLE_LEAF_PREFIX = 0u8; +pub const MERKLE_NODE_PREFIX = 1u8; + +fn leaf_hash(data: Bytes) -> Bytes { + let mut bytes = Bytes::new(); + bytes.push(MERKLE_LEAF_PREFIX); + bytes.append(data); + + let (slice, _) = Bytes::from(keccak256(bytes)).split_at(20); + + slice +} + +fn node_hash(child_a: Bytes, child_b: Bytes) -> Bytes { + let mut bytes = Bytes::with_capacity(41); + bytes.push(MERKLE_NODE_PREFIX); + + let a: b256 = child_a.into(); + let b: b256 = child_b.into(); + if a > b { + bytes.append(child_b); + bytes.append(child_a); + } else { + bytes.append(child_a); + bytes.append(child_b); + } + + let (slice, _) = Bytes::from(keccak256(bytes)).split_at(20); + + slice +} + +pub fn validate_proof( + encoded_proof: Bytes, + ref mut proof_offset: u64, + root: Bytes, + leaf_data: Bytes, +) -> u64 { + let mut current_digest = leaf_hash(leaf_data); + let proof_size = encoded_proof.get(proof_offset).unwrap().as_u64(); + proof_offset += 1; + + let mut i = 0; + while i < proof_size { + let (_, slice) = encoded_proof.split_at(proof_offset); + let (sibling_digest, _) = slice.split_at(20); + proof_offset += 20; + current_digest = node_hash(current_digest, sibling_digest); + i += 1; + } + + let current_digest_b256: b256 = current_digest.into(); + let root_b256: b256 = root.into(); + + require(current_digest_b256 == root_b256, PythError::InvalidProof); + + proof_offset +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw new file mode 100644 index 0000000000..28ef0b430b --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw @@ -0,0 +1,17 @@ +library; + +pub fn absolute_of_exponent(exponent: u32) -> u32 { + if exponent == 0u32 { + exponent + } else { + u32::max() - exponent + 1 + } +} + +#[storage(read)] +pub fn total_fee( + total_number_of_updates: u64, + single_update_fee: StorageKey, +) -> u64 { + total_number_of_updates * single_update_fee.read() +} From 5bd8e740da864f5891e28dfe373178928a702951 Mon Sep 17 00:00:00 2001 From: chad Date: Sun, 15 Sep 2024 21:04:03 -0500 Subject: [PATCH 05/21] test: testing with 10 iterations per bench --- .github/workflows/bench.yml | 47 ++++++++++++++++++------------------- 1 file changed, 23 insertions(+), 
24 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 075d7789e5..96f6a93255 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -1,28 +1,27 @@ -# Uncomment this when we want to run benchmarks on PRs -# name: Benchmarks -# on: -# pull_request: -# branches: -# - master -# push: -# branches-ignore: -# - master +name: Benchmarks +on: + pull_request: + branches: + - master + push: + branches-ignore: + - master -# jobs: -# benchmarks: -# runs-on: ubuntu-latest -# steps: -# - name: Checkout -# uses: actions/checkout@v4 +jobs: + benchmarks: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 -# - name: CI Setup -# uses: ./.github/actions/test-setup + - name: CI Setup + uses: ./.github/actions/test-setup -# - name: Pretest -# run: pnpm pretest + - name: Pretest + run: pnpm pretest -# - name: Run Node benchmarks -# uses: CodSpeedHQ/action@v3 -# with: -# run: pnpm bench:node -# token: ${{ secrets.CODSPEED_TOKEN }} + - name: Run Node benchmarks + uses: CodSpeedHQ/action@v3 + with: + run: pnpm bench:node + token: ${{ secrets.CODSPEED_TOKEN }} From 6a5b867312a492cff775ed4ec84a4ab93951079e Mon Sep 17 00:00:00 2001 From: chad Date: Mon, 16 Sep 2024 16:31:17 -0500 Subject: [PATCH 06/21] test: update contract interaction test --- .../benchmarks/src/contract-interaction.bench.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index e0c00f6801..cd06d05415 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -27,6 +27,18 @@ describe('Contract Interaction Benchmarks', () => { const { networkUrl } = DEVNET_CONFIG; const provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); + + const { waitForResult } = await new CounterContractFactory(wallet).deploy(); + const { contract: contractDeployed } = await waitForResult(); + + contract = contractDeployed; + + const { waitForResult: waitForResultCallTestContract } = await new CallTestContractFactory( + wallet + ).deploy(); + const { contract: callTestContractDeployed } = await waitForResultCallTestContract(); + + callTestContract = callTestContractDeployed; }); } else { beforeEach(async () => { From 89a2f486735b67f355bac6952b2b27ffd50df847 Mon Sep 17 00:00:00 2001 From: chad Date: Mon, 16 Sep 2024 19:53:08 -0500 Subject: [PATCH 07/21] chore: wallet method updates --- .../benchmarks/src/cost-estimation.bench.ts | 132 ++++++++++-------- .../src/transaction-results.bench.ts | 5 +- 2 files changed, 75 insertions(+), 62 deletions(-) diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index 0796b7249d..70eb0fb2fd 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -1,7 +1,7 @@ /* eslint-disable import/no-extraneous-dependencies */ -import type { TransferParams, WalletUnlocked, BytesLike } from 'fuels'; -import { Wallet, Provider, ScriptTransactionRequest } from 'fuels'; +import type { TransferParams } from 'fuels'; +import { Wallet, Provider, ScriptTransactionRequest, WalletUnlocked } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; @@ -51,10 +51,7 @@ describe('Cost Estimation Benchmarks', () => { beforeAll(async () => { 
const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); - const wallet = Wallet.fromPrivateKey( - process.env.DEVNET_WALLET_PVT_KEY as BytesLike, - provider - ); + const wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); setup(provider); @@ -101,69 +98,86 @@ describe('Cost Estimation Benchmarks', () => { } ); - bench('should successfully get transaction cost estimate for multi contract calls', async () => { - for (let i = 0; i < 10; i++) { - const invocationScope = contract.multiCall([ - contract.functions.return_context_amount().callParams({ - forward: [100, contract.provider.getBaseAssetId()], - }), - contract.functions.return_context_amount().callParams({ - forward: [200, TestAssetId.A.value], - }), - ]); + bench( + 'should successfully get transaction cost estimate for multi contract calls 10 times', + async () => { + for (let i = 0; i < 10; i++) { + const invocationScope = contract.multiCall([ + contract.functions.return_context_amount().callParams({ + forward: [100, contract.provider.getBaseAssetId()], + }), + contract.functions.return_context_amount().callParams({ + forward: [200, TestAssetId.A.value], + }), + ]); - const cost = await invocationScope.getTransactionCost(); + const cost = await invocationScope.getTransactionCost(); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } } - }); + ); - bench('should successfully get transaction cost estimate for a single transfer', async () => { - for (let i = 0; i < 10; i++) { - request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); + bench( + 'should successfully get transaction cost estimate for a single transfer 10 times', + async () => { + for (let i = 0; i < 10; i++) { + request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); - const cost = await sender.getTransactionCost(request); + const cost = await sender.getTransactionCost(request); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } } - }); + ); - bench('should successfully get transaction cost estimate for a batch transfer', async () => { - for (let i = 0; i < 10; i++) { - const amountToTransfer1 = 989; - const amountToTransfer2 = 699; - const amountToTransfer3 = 122; - - const transferParams: TransferParams[] = [ - { - destination: receiver1.address, - amount: amountToTransfer1, - assetId: provider.getBaseAssetId(), - }, - { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, - { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, - ]; - - const cost = await contract.functions - .sum(40, 50) - .addBatchTransfer(transferParams) - .getTransactionCost(); + bench( + 'should successfully get transaction cost estimate for a batch transfer 10 times', + async () => { + for (let i = 0; i < 10; i++) { + const amountToTransfer1 = 989; + const amountToTransfer2 = 
699; + const amountToTransfer3 = 122; + + const transferParams: TransferParams[] = [ + { + destination: receiver1.address, + amount: amountToTransfer1, + assetId: provider.getBaseAssetId(), + }, + { + destination: receiver2.address, + amount: amountToTransfer2, + assetId: TestAssetId.A.value, + }, + { + destination: receiver3.address, + amount: amountToTransfer3, + assetId: TestAssetId.B.value, + }, + ]; - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); + const cost = await contract.functions + .sum(40, 50) + .addBatchTransfer(transferParams) + .getTransactionCost(); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } } - }); + ); it('should successfully get transaction cost estimate for a mint 10 times', async () => { for (let i = 0; i < 10; i++) { diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index 477e25436b..79cda0fd67 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -1,7 +1,6 @@ /* eslint-disable import/no-extraneous-dependencies */ -import { Wallet, Provider } from 'fuels'; -import type { WalletUnlocked, TransferParams } from 'fuels'; +import { Wallet, Provider, WalletUnlocked, TransferParams } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; @@ -23,7 +22,7 @@ describe('Transaction Submission Benchmarks', () => { beforeAll(async () => { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); - wallet = Wallet.fromPrivateKey(process.env.DEVNET_WALLET_PVT_KEY as string, provider); + wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); receiver1 = Wallet.generate({ provider }); receiver2 = Wallet.generate({ provider }); receiver3 = Wallet.generate({ provider }); From 7d2389cbb65860212a8ea9028f251e9d5e25a637 Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 12:28:41 -0500 Subject: [PATCH 08/21] test: add console logs for wallet addresses in CI --- internal/benchmarks/src/transaction-results.bench.ts | 7 ++++++- internal/benchmarks/src/wallet.bench.ts | 4 ++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index 79cda0fd67..663b15a5f1 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -1,6 +1,7 @@ /* eslint-disable import/no-extraneous-dependencies */ -import { Wallet, Provider, WalletUnlocked, TransferParams } from 'fuels'; +import type { TransferParams } from 'fuels'; +import { Wallet, Provider, WalletUnlocked } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; @@ -23,9 +24,13 @@ describe('Transaction Submission Benchmarks', () => { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); + console.log('instantiated wallet', wallet.address.toString()); receiver1 = Wallet.generate({ provider }); + console.log('instantiated receiver1', receiver1.address.toString()); 
receiver2 = Wallet.generate({ provider }); + console.log('instantiated receiver2', receiver2.address.toString()); receiver3 = Wallet.generate({ provider }); + console.log('instantiated receiver3', receiver3.address.toString()); }); } else { beforeEach(async () => { diff --git a/internal/benchmarks/src/wallet.bench.ts b/internal/benchmarks/src/wallet.bench.ts index 553b2f60be..4b957c263e 100644 --- a/internal/benchmarks/src/wallet.bench.ts +++ b/internal/benchmarks/src/wallet.bench.ts @@ -37,7 +37,7 @@ describe('Wallet Benchmarks', () => { }); } - bench('Instantiate a new Unlocked wallet', () => { + bench('Instantiate a new Unlocked wallet 10 times', () => { for (let i = 0; i < 10; i++) { const unlockedWallet = new WalletUnlocked(expectedPrivateKey, provider); @@ -54,7 +54,7 @@ describe('Wallet Benchmarks', () => { } }); - bench('Instantiate from an address', () => { + bench('Instantiate from an address 10 times', () => { for (let i = 0; i < 10; i++) { const lockedWallet = Wallet.fromAddress(expectedAddress, provider); From aae74db04d352016c0746ab5d4015ca2f153c9f8 Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 12:42:36 -0500 Subject: [PATCH 09/21] test: add more debug logs... --- internal/benchmarks/src/contract-interaction.bench.ts | 5 ++++- internal/benchmarks/src/transaction-results.bench.ts | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index cd06d05415..7e2f9a0be8 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -27,9 +27,12 @@ describe('Contract Interaction Benchmarks', () => { const { networkUrl } = DEVNET_CONFIG; const provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); + console.log('instantiated provider', provider.url); + console.log('instantiated wallet', wallet.address.toString()); const { waitForResult } = await new CounterContractFactory(wallet).deploy(); const { contract: contractDeployed } = await waitForResult(); + console.log('instantiated contract', contractDeployed.id); contract = contractDeployed; @@ -37,7 +40,7 @@ describe('Contract Interaction Benchmarks', () => { wallet ).deploy(); const { contract: callTestContractDeployed } = await waitForResultCallTestContract(); - + console.log('instantiated callTestContract', callTestContractDeployed.id); callTestContract = callTestContractDeployed; }); } else { diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index 663b15a5f1..dffc64ebb0 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -24,6 +24,8 @@ describe('Transaction Submission Benchmarks', () => { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); + console.log('instantiated provider', provider.url); + console.log('instantiated wallet', wallet.address.toString()); receiver1 = Wallet.generate({ provider }); console.log('instantiated receiver1', receiver1.address.toString()); From e510839de27ad97de11c0b6ae12681dedb69acf1 Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 12:55:21 -0500 Subject: [PATCH 10/21] test: attempt using static deploy method --- internal/benchmarks/src/contract-interaction.bench.ts 
| 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index 7e2f9a0be8..d47c1410a0 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -30,15 +30,14 @@ describe('Contract Interaction Benchmarks', () => { console.log('instantiated provider', provider.url); console.log('instantiated wallet', wallet.address.toString()); - const { waitForResult } = await new CounterContractFactory(wallet).deploy(); + const { waitForResult } = await CounterContractFactory.deploy(wallet); const { contract: contractDeployed } = await waitForResult(); console.log('instantiated contract', contractDeployed.id); contract = contractDeployed; - const { waitForResult: waitForResultCallTestContract } = await new CallTestContractFactory( - wallet - ).deploy(); + const { waitForResult: waitForResultCallTestContract } = + await CallTestContractFactory.deploy(wallet); const { contract: callTestContractDeployed } = await waitForResultCallTestContract(); console.log('instantiated callTestContract', callTestContractDeployed.id); callTestContract = callTestContractDeployed; From 3998316db319d1149d9de211c1249a5f005cff12 Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 15:14:39 -0500 Subject: [PATCH 11/21] chore: reduce contract calls for test --- .../src/contract-interaction.bench.ts | 55 ++++++---------- .../src/transaction-results.bench.ts | 64 ++++++++----------- 2 files changed, 49 insertions(+), 70 deletions(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index d47c1410a0..8b53136de0 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -62,53 +62,40 @@ describe('Contract Interaction Benchmarks', () => { }); } - bench('should successfully execute a contract read function 10 times', async () => { - for (let i = 0; i < 10; i++) { - const tx = await contract.functions.get_count().call(); + bench('should successfully execute a contract read function', async () => { + const tx = await contract.functions.get_count().call(); - const { value } = await tx.waitForResult(); + const { value } = await tx.waitForResult(); - expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); - } + expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); }); - bench('should successfully execute a contract multi call 10 times', async () => { - const initialValue = 100; - for (let i = 1; i < 11; i++) { - const tx = await contract - .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) - .call(); + bench('should successfully execute a contract multi call', async () => { + const tx = await contract + .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) + .call(); - const { value } = await tx.waitForResult(); + const { value } = await tx.waitForResult(); - expect(JSON.stringify(value)).toEqual( - JSON.stringify([bn(initialValue * i), bn(initialValue * i)]) - ); - } + expect(JSON.stringify(value)).toEqual(JSON.stringify([bn(100), bn(100)])); }); - bench('should successfully write to a contract 10 times', async () => { - for (let i = 0; i < 10; i++) { - const tx = await contract.functions.increment_counter(100).call(); - await tx.waitForResult(); - } + bench('should successfully write to a contract', async () => { + 
const tx = await contract.functions.increment_counter(100).call(); + await tx.waitForResult(); }); - bench('should successfully execute a contract mint 10 times', async () => { - for (let i = 0; i < 10; i++) { - const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); - await tx.waitForResult(); - } + bench('should successfully execute a contract mint', async () => { + const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); + await tx.waitForResult(); }); - bench('should successfully execute a contract deploy 10 times', async () => { - for (let i = 0; i < 10; i++) { - const factory = new CounterContractFactory(wallet); - const { waitForResult } = await factory.deploy(); - const { contract: deployedContract } = await waitForResult(); + bench('should successfully execute a contract deploy', async () => { + const factory = new CounterContractFactory(wallet); + const { waitForResult } = await factory.deploy(); + const { contract: deployedContract } = await waitForResult(); - expect(deployedContract).toBeDefined(); - } + expect(deployedContract).toBeDefined(); }); bench('should successfully execute a contract deploy as blobs', async () => { diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index dffc64ebb0..1b48dbc8a2 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -78,53 +78,45 @@ describe('Transaction Submission Benchmarks', () => { expect(transaction).toBeDefined(); }; - bench('should successfully transfer a single asset between wallets 10 times', async () => { - for (let i = 0; i < 10; i++) { - await transfer(); - } + bench('should successfully transfer a single asset between wallets', async () => { + await transfer(); }); - bench('should successfully conduct a custom transfer between wallets 10 times', async () => { - for (let i = 0; i < 10; i++) { - await customTransfer(); - } + bench('should successfully conduct a custom transfer between wallets', async () => { + await customTransfer(); }); - bench('should successfully perform a batch transfer 10 times', async () => { - for (let i = 0; i < 10; i++) { - const amountToTransfer1 = 989; - const amountToTransfer2 = 699; - const amountToTransfer3 = 122; + bench('should successfully perform a batch transfer', async () => { + const amountToTransfer1 = 989; + const amountToTransfer2 = 699; + const amountToTransfer3 = 122; - const transferParams: TransferParams[] = [ - { - destination: receiver1.address, - amount: amountToTransfer1, - assetId: provider.getBaseAssetId(), - }, - { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, - { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, - ]; + const transferParams: TransferParams[] = [ + { + destination: receiver1.address, + amount: amountToTransfer1, + assetId: provider.getBaseAssetId(), + }, + { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, + { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, + ]; - const tx = await wallet.batchTransfer(transferParams); + const tx = await wallet.batchTransfer(transferParams); - const { isStatusSuccess } = await tx.waitForResult(); + const { isStatusSuccess } = await tx.waitForResult(); - expect(isStatusSuccess).toBeTruthy(); - } + expect(isStatusSuccess).toBeTruthy(); }); - 
bench('should successfully withdraw to the base layer 10 times', async () => { - for (let i = 0; i < 10; i++) { - const txParams = { - witnessLimit: 800, - maxFee: 100_000, - }; + bench('should successfully withdraw to the base layer', async () => { + const txParams = { + witnessLimit: 800, + maxFee: 100_000, + }; - const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); - const { transaction } = await pendingTx.waitForResult(); + const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); + const { transaction } = await pendingTx.waitForResult(); - expect(transaction).toBeDefined(); - } + expect(transaction).toBeDefined(); }); }); From deb5f77fc36eb083ff13fd97a29a522daad590ba Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 16:29:09 -0500 Subject: [PATCH 12/21] test: removing problematic tests --- .../src/contract-interaction.bench.ts | 14 +++---- .../src/transaction-results.bench.ts | 42 +++++++++---------- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index 8b53136de0..a58da0c027 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -70,15 +70,15 @@ describe('Contract Interaction Benchmarks', () => { expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); }); - bench('should successfully execute a contract multi call', async () => { - const tx = await contract - .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) - .call(); + // bench('should successfully execute a contract multi call', async () => { + // const tx = await contract + // .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) + // .call(); - const { value } = await tx.waitForResult(); + // const { value } = await tx.waitForResult(); - expect(JSON.stringify(value)).toEqual(JSON.stringify([bn(100), bn(100)])); - }); + // expect(JSON.stringify(value)).toEqual(JSON.stringify([bn(100), bn(100)])); + // }); bench('should successfully write to a contract', async () => { const tx = await contract.functions.increment_counter(100).call(); diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index 1b48dbc8a2..f388cba73b 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -86,27 +86,27 @@ describe('Transaction Submission Benchmarks', () => { await customTransfer(); }); - bench('should successfully perform a batch transfer', async () => { - const amountToTransfer1 = 989; - const amountToTransfer2 = 699; - const amountToTransfer3 = 122; - - const transferParams: TransferParams[] = [ - { - destination: receiver1.address, - amount: amountToTransfer1, - assetId: provider.getBaseAssetId(), - }, - { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, - { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, - ]; - - const tx = await wallet.batchTransfer(transferParams); - - const { isStatusSuccess } = await tx.waitForResult(); - - expect(isStatusSuccess).toBeTruthy(); - }); + // bench('should successfully perform a batch transfer', async () => { + // const amountToTransfer1 = 989; + // const amountToTransfer2 = 699; + // const amountToTransfer3 = 122; + + // const transferParams: 
TransferParams[] = [ + // { + // destination: receiver1.address, + // amount: amountToTransfer1, + // assetId: provider.getBaseAssetId(), + // }, + // { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, + // { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, + // ]; + + // const tx = await wallet.batchTransfer(transferParams); + + // const { isStatusSuccess } = await tx.waitForResult(); + + // expect(isStatusSuccess).toBeTruthy(); + // }); bench('should successfully withdraw to the base layer', async () => { const txParams = { From d559940862add658378aab650e8d26587606b648 Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 20:02:55 -0500 Subject: [PATCH 13/21] test: only loop local node tests --- .../src/contract-interaction.bench.ts | 157 +++++++++++++----- .../src/transaction-results.bench.ts | 143 +++++++++++----- 2 files changed, 214 insertions(+), 86 deletions(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index a58da0c027..88af15eecb 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -21,25 +21,23 @@ describe('Contract Interaction Benchmarks', () => { let callTestContract: CallTestContract; let wallet: WalletUnlocked; let cleanup: () => void; + const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; - if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { + if (isDevnet) { beforeAll(async () => { const { networkUrl } = DEVNET_CONFIG; const provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); - console.log('instantiated provider', provider.url); - console.log('instantiated wallet', wallet.address.toString()); const { waitForResult } = await CounterContractFactory.deploy(wallet); const { contract: contractDeployed } = await waitForResult(); - console.log('instantiated contract', contractDeployed.id); contract = contractDeployed; const { waitForResult: waitForResultCallTestContract } = await CallTestContractFactory.deploy(wallet); const { contract: callTestContractDeployed } = await waitForResultCallTestContract(); - console.log('instantiated callTestContract', callTestContractDeployed.id); + callTestContract = callTestContractDeployed; }); } else { @@ -62,41 +60,120 @@ describe('Contract Interaction Benchmarks', () => { }); } - bench('should successfully execute a contract read function', async () => { - const tx = await contract.functions.get_count().call(); - - const { value } = await tx.waitForResult(); - - expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); - }); - - // bench('should successfully execute a contract multi call', async () => { - // const tx = await contract - // .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) - // .call(); - - // const { value } = await tx.waitForResult(); - - // expect(JSON.stringify(value)).toEqual(JSON.stringify([bn(100), bn(100)])); - // }); - - bench('should successfully write to a contract', async () => { - const tx = await contract.functions.increment_counter(100).call(); - await tx.waitForResult(); - }); - - bench('should successfully execute a contract mint', async () => { - const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); - await tx.waitForResult(); - }); - - bench('should successfully execute a contract deploy', async 
() => { - const factory = new CounterContractFactory(wallet); - const { waitForResult } = await factory.deploy(); - const { contract: deployedContract } = await waitForResult(); - - expect(deployedContract).toBeDefined(); - }); + bench( + isDevnet + ? 'should successfully execute a contract read function' + : 'should successfully execute a contract read function 10 times', + async () => { + if (isDevnet) { + const tx = await contract.functions.get_count().call(); + + const { value } = await tx.waitForResult(); + + expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); + } else { + for (let i = 0; i < 10; i++) { + const tx = await contract.functions.get_count().call(); + + const { value } = await tx.waitForResult(); + + expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); + } + } + } + ); + + bench( + isDevnet + ? 'should successfully execute a contract multi call' + : 'should successfully execute a contract multi call 10 times', + async () => { + const initialValue = 100; + if (isDevnet) { + const tx = await contract + .multiCall([ + contract.functions.increment_counter(initialValue), + contract.functions.get_count(), + ]) + .call(); + + const { value } = await tx.waitForResult(); + + expect(JSON.stringify(value)).toEqual(JSON.stringify([bn(initialValue), bn(initialValue)])); + } else { + for (let i = 1; i < 11; i++) { + const tx = await contract + .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) + .call(); + + const { value } = await tx.waitForResult(); + + expect(JSON.stringify(value)).toEqual( + JSON.stringify([bn(initialValue * i), bn(initialValue * i)]) + ); + } + } + } + ); + + bench( + isDevnet + ? 'should successfully write to a contract' + : 'should successfully write to a contract 10 times', + async () => { + if (isDevnet) { + const tx = await contract.functions.increment_counter(100).call(); + await tx.waitForResult(); + } else { + for (let i = 0; i < 10; i++) { + const tx = await contract.functions.increment_counter(100).call(); + await tx.waitForResult(); + } + } + } + ); + + bench( + isDevnet + ? 'should successfully execute a contract mint' + : 'should successfully execute a contract mint 10 times', + async () => { + if (isDevnet) { + const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); + await tx.waitForResult(); + } else { + for (let i = 0; i < 10; i++) { + const tx = await callTestContract.functions + .mint_coins(TestAssetId.A.value, bn(100)) + .call(); + await tx.waitForResult(); + } + } + } + ); + + bench( + isDevnet + ? 
'should successfully execute a contract deploy' + : 'should successfully execute a contract deploy 10 times', + async () => { + if (isDevnet) { + const factory = new CounterContractFactory(wallet); + const { waitForResult } = await factory.deploy(); + const { contract: deployedContract } = await waitForResult(); + + expect(deployedContract).toBeDefined(); + } else { + for (let i = 0; i < 10; i++) { + const factory = new CounterContractFactory(wallet); + const { waitForResult } = await factory.deploy(); + const { contract: deployedContract } = await waitForResult(); + + expect(deployedContract).toBeDefined(); + } + } + } + ); bench('should successfully execute a contract deploy as blobs', async () => { const factory = new PythContractFactory(wallet); diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index f388cba73b..202206eecd 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -2,7 +2,7 @@ import type { TransferParams } from 'fuels'; import { Wallet, Provider, WalletUnlocked } from 'fuels'; -import { launchTestNode, TestAssetId } from 'fuels/test-utils'; +import { launchTestNode } from 'fuels/test-utils'; import { bench } from 'vitest'; import { DEVNET_CONFIG } from './config'; @@ -19,20 +19,17 @@ describe('Transaction Submission Benchmarks', () => { let receiver3: WalletUnlocked; let cleanup: () => void; - if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { + const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; + + if (isDevnet) { beforeAll(async () => { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); - console.log('instantiated provider', provider.url); - console.log('instantiated wallet', wallet.address.toString()); receiver1 = Wallet.generate({ provider }); - console.log('instantiated receiver1', receiver1.address.toString()); receiver2 = Wallet.generate({ provider }); - console.log('instantiated receiver2', receiver2.address.toString()); receiver3 = Wallet.generate({ provider }); - console.log('instantiated receiver3', receiver3.address.toString()); }); } else { beforeEach(async () => { @@ -78,45 +75,99 @@ describe('Transaction Submission Benchmarks', () => { expect(transaction).toBeDefined(); }; - bench('should successfully transfer a single asset between wallets', async () => { - await transfer(); - }); - - bench('should successfully conduct a custom transfer between wallets', async () => { - await customTransfer(); + bench( + isDevnet + ? 'should successfully transfer a single asset between wallets' + : 'should successfully transfer a single asset between wallets 10 times', + async () => { + if (isDevnet) { + await transfer(); + } else { + for (let i = 0; i < 10; i++) { + await transfer(); + } + } + } + ); + + bench( + isDevnet + ? 
'should successfully conduct a custom transfer between wallets' + : 'should successfully conduct a custom transfer between wallets 10 times', + async () => { + if (isDevnet) { + await customTransfer(); + } else { + for (let i = 0; i < 10; i++) { + await customTransfer(); + } + } + } + ); + + bench('should successfully perform a batch transfer', async () => { + const amountToTransfer1 = 989; + const amountToTransfer2 = 699; + const amountToTransfer3 = 122; + + const transferParams: TransferParams[] = [ + { + destination: receiver1.address, + amount: amountToTransfer1, + assetId: provider.getBaseAssetId(), + }, + { + destination: receiver2.address, + amount: amountToTransfer2, + assetId: provider.getBaseAssetId(), + }, + { + destination: receiver3.address, + amount: amountToTransfer3, + assetId: provider.getBaseAssetId(), + }, + ]; + + if (isDevnet) { + const tx = await wallet.batchTransfer(transferParams); + + const { isStatusSuccess } = await tx.waitForResult(); + + expect(isStatusSuccess).toBeTruthy(); + } else { + for (let i = 0; i < 10; i++) { + const tx = await wallet.batchTransfer(transferParams); + + const { isStatusSuccess } = await tx.waitForResult(); + + expect(isStatusSuccess).toBeTruthy(); + } + } }); - // bench('should successfully perform a batch transfer', async () => { - // const amountToTransfer1 = 989; - // const amountToTransfer2 = 699; - // const amountToTransfer3 = 122; - - // const transferParams: TransferParams[] = [ - // { - // destination: receiver1.address, - // amount: amountToTransfer1, - // assetId: provider.getBaseAssetId(), - // }, - // { destination: receiver2.address, amount: amountToTransfer2, assetId: TestAssetId.A.value }, - // { destination: receiver3.address, amount: amountToTransfer3, assetId: TestAssetId.B.value }, - // ]; - - // const tx = await wallet.batchTransfer(transferParams); - - // const { isStatusSuccess } = await tx.waitForResult(); - - // expect(isStatusSuccess).toBeTruthy(); - // }); - - bench('should successfully withdraw to the base layer', async () => { - const txParams = { - witnessLimit: 800, - maxFee: 100_000, - }; - - const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); - const { transaction } = await pendingTx.waitForResult(); - - expect(transaction).toBeDefined(); - }); + bench( + isDevnet + ? 
'should successfully withdraw to the base layer' + : 'should successfully withdraw to the base layer 10 times', + async () => { + const txParams = { + witnessLimit: 800, + maxFee: 100_000, + }; + + if (isDevnet) { + const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); + const { transaction } = await pendingTx.waitForResult(); + + expect(transaction).toBeDefined(); + } else { + for (let i = 0; i < 10; i++) { + const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); + const { transaction } = await pendingTx.waitForResult(); + + expect(transaction).toBeDefined(); + } + } + } + ); }); From ab23f262feea070f60404f6415deb2e3e339c10e Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 20:48:36 -0500 Subject: [PATCH 14/21] test: update cost estimation loop --- .../benchmarks/src/cost-estimation.bench.ts | 169 +++++++++++++----- 1 file changed, 126 insertions(+), 43 deletions(-) diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index 70eb0fb2fd..c563034329 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -2,7 +2,7 @@ import type { TransferParams } from 'fuels'; import { Wallet, Provider, ScriptTransactionRequest, WalletUnlocked } from 'fuels'; -import { launchTestNode, TestAssetId } from 'fuels/test-utils'; +import { launchTestNode } from 'fuels/test-utils'; import { bench } from 'vitest'; import type { CallTestContract } from '../test/typegen/contracts'; @@ -26,6 +26,8 @@ describe('Cost Estimation Benchmarks', () => { let cleanup: () => void; + const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; + const setup = (testProvider: Provider) => { request = new ScriptTransactionRequest({ gasLimit: 1000000 }); @@ -47,7 +49,7 @@ describe('Cost Estimation Benchmarks', () => { ); }; - if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { + if (isDevnet) { beforeAll(async () => { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); @@ -81,7 +83,7 @@ describe('Cost Estimation Benchmarks', () => { bench( 'should successfully get transaction cost estimate for a single contract call done 10 times', async () => { - for (let i = 0; i < 10; i++) { + if (isDevnet) { const cost = await contract.functions .return_context_amount() .callParams({ @@ -94,20 +96,37 @@ describe('Cost Estimation Benchmarks', () => { expect(cost.gasPrice).toBeDefined(); expect(cost.gasUsed).toBeDefined(); expect(cost.gasPrice).toBeDefined(); + } else { + for (let i = 0; i < 10; i++) { + const cost = await contract.functions + .return_context_amount() + .callParams({ + forward: [100, contract.provider.getBaseAssetId()], + }) + .getTransactionCost(); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } } } ); bench( - 'should successfully get transaction cost estimate for multi contract calls 10 times', + isDevnet + ? 
'should successfully get transaction cost estimate for multi contract calls' + : 'should successfully get transaction cost estimate for multi contract calls 10 times', async () => { - for (let i = 0; i < 10; i++) { + if (isDevnet) { const invocationScope = contract.multiCall([ contract.functions.return_context_amount().callParams({ - forward: [100, contract.provider.getBaseAssetId()], + forward: [100, provider.getBaseAssetId()], }), contract.functions.return_context_amount().callParams({ - forward: [200, TestAssetId.A.value], + forward: [200, provider.getBaseAssetId()], }), ]); @@ -118,14 +137,32 @@ describe('Cost Estimation Benchmarks', () => { expect(cost.gasPrice).toBeDefined(); expect(cost.gasUsed).toBeDefined(); expect(cost.gasPrice).toBeDefined(); + } else { + for (let i = 0; i < 10; i++) { + const invocationScope = contract.multiCall([ + contract.functions.return_context_amount().callParams({ + forward: [100, provider.getBaseAssetId()], + }), + ]); + + const cost = await invocationScope.getTransactionCost(); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } } } ); bench( - 'should successfully get transaction cost estimate for a single transfer 10 times', + isDevnet + ? 'should successfully get transaction cost estimate for a single transfer' + : 'should successfully get transaction cost estimate for a single transfer 10 times', async () => { - for (let i = 0; i < 10; i++) { + if (isDevnet) { request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); const cost = await sender.getTransactionCost(request); @@ -135,36 +172,48 @@ describe('Cost Estimation Benchmarks', () => { expect(cost.gasPrice).toBeDefined(); expect(cost.gasUsed).toBeDefined(); expect(cost.gasPrice).toBeDefined(); + } else { + for (let i = 0; i < 10; i++) { + request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); + + const cost = await sender.getTransactionCost(request); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } } } ); bench( - 'should successfully get transaction cost estimate for a batch transfer 10 times', + isDevnet + ? 
'should successfully get transaction cost estimate for a batch transfer' + : 'should successfully get transaction cost estimate for a batch transfer 10 times', async () => { - for (let i = 0; i < 10; i++) { - const amountToTransfer1 = 989; - const amountToTransfer2 = 699; - const amountToTransfer3 = 122; - - const transferParams: TransferParams[] = [ - { - destination: receiver1.address, - amount: amountToTransfer1, - assetId: provider.getBaseAssetId(), - }, - { - destination: receiver2.address, - amount: amountToTransfer2, - assetId: TestAssetId.A.value, - }, - { - destination: receiver3.address, - amount: amountToTransfer3, - assetId: TestAssetId.B.value, - }, - ]; - + const amountToTransfer1 = 989; + const amountToTransfer2 = 699; + const amountToTransfer3 = 122; + const transferParams: TransferParams[] = [ + { + destination: receiver1.address, + amount: amountToTransfer1, + assetId: provider.getBaseAssetId(), + }, + { + destination: receiver2.address, + amount: amountToTransfer2, + assetId: provider.getBaseAssetId(), + }, + { + destination: receiver3.address, + amount: amountToTransfer3, + assetId: provider.getBaseAssetId(), + }, + ]; + if (isDevnet) { const cost = await contract.functions .sum(40, 50) .addBatchTransfer(transferParams) @@ -175,21 +224,55 @@ describe('Cost Estimation Benchmarks', () => { expect(cost.gasPrice).toBeDefined(); expect(cost.gasUsed).toBeDefined(); expect(cost.gasPrice).toBeDefined(); + } else { + for (let i = 0; i < 10; i++) { + const cost = await contract.functions + .sum(40, 50) + .addBatchTransfer(transferParams) + .getTransactionCost(); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } } } ); - it('should successfully get transaction cost estimate for a mint 10 times', async () => { - for (let i = 0; i < 10; i++) { - const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; + bench( + isDevnet + ? 
'should successfully get transaction cost estimate for a mint' + : 'should successfully get transaction cost estimate for a mint 10 times', + async () => { + if (isDevnet) { + const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; + const amountToMint = 1_000; + + const cost = await contract.functions.mint_coins(subId, amountToMint).getTransactionCost(); + + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } else { + for (let i = 0; i < 10; i++) { + const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; + const amountToMint = 1_000; - const cost = await contract.functions.mint_coins(subId, 1_000).getTransactionCost(); + const cost = await contract.functions + .mint_coins(subId, amountToMint) + .getTransactionCost(); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + } + } } - }); + ); }); From 03bb4be33b504e240e88f4ebcb3ff450ccbbfcfb Mon Sep 17 00:00:00 2001 From: chad Date: Tue, 17 Sep 2024 21:19:57 -0500 Subject: [PATCH 15/21] test: update contract read calls --- internal/benchmarks/src/contract-interaction.bench.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index 88af15eecb..a6b0a9cc2b 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -70,7 +70,7 @@ describe('Contract Interaction Benchmarks', () => { const { value } = await tx.waitForResult(); - expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); + expect(value).toBeDefined(); } else { for (let i = 0; i < 10; i++) { const tx = await contract.functions.get_count().call(); @@ -99,7 +99,7 @@ describe('Contract Interaction Benchmarks', () => { const { value } = await tx.waitForResult(); - expect(JSON.stringify(value)).toEqual(JSON.stringify([bn(initialValue), bn(initialValue)])); + expect(value).toBeDefined(); } else { for (let i = 1; i < 11; i++) { const tx = await contract From 535145d04671fd2480c6df804bbaf114e8c2311f Mon Sep 17 00:00:00 2001 From: chad Date: Wed, 18 Sep 2024 11:18:07 -0500 Subject: [PATCH 16/21] chore: refactor benchmarking tests --- .../src/contract-interaction.bench.ts | 177 ++++-------- .../benchmarks/src/cost-estimation.bench.ts | 264 ++++++------------ .../src/transaction-results.bench.ts | 130 +++------ internal/benchmarks/src/wallet.bench.ts | 59 ++-- 4 files changed, 200 insertions(+), 430 deletions(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index a6b0a9cc2b..cc456cdf27 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -12,6 +12,7 @@ import { } from '../test/typegen/contracts'; import { DEVNET_CONFIG } from './config'; + /** * @group node * @group browser @@ -22,26 +23,21 @@ describe('Contract Interaction Benchmarks', () => { let wallet: WalletUnlocked; let cleanup: () => void; const isDevnet = 
process.env.DEVNET_WALLET_PVT_KEY !== undefined; + const iterations = isDevnet ? 1 : 10; - if (isDevnet) { - beforeAll(async () => { + const setupTestEnvironment = async () => { + if (isDevnet) { const { networkUrl } = DEVNET_CONFIG; const provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); const { waitForResult } = await CounterContractFactory.deploy(wallet); - const { contract: contractDeployed } = await waitForResult(); - - contract = contractDeployed; + contract = (await waitForResult()).contract; const { waitForResult: waitForResultCallTestContract } = await CallTestContractFactory.deploy(wallet); - const { contract: callTestContractDeployed } = await waitForResultCallTestContract(); - - callTestContract = callTestContractDeployed; - }); - } else { - beforeEach(async () => { + callTestContract = (await waitForResultCallTestContract()).contract; + } else { const launched = await launchTestNode({ contractsConfigs: [ { factory: CounterContractFactory }, @@ -53,127 +49,59 @@ describe('Contract Interaction Benchmarks', () => { contract = launched.contracts[0]; callTestContract = launched.contracts[1]; wallet = launched.wallets[0]; - }); + } + }; - afterEach(() => { + beforeAll(setupTestEnvironment); + + afterAll(() => { + if (!isDevnet && cleanup) { cleanup(); + } + }); + + const runBenchmark = (name: string, benchmarkFn: () => Promise) => { + bench(isDevnet ? name : `${name} (x${iterations} times)`, async () => { + for (let i = 0; i < iterations; i++) { + await benchmarkFn(); + } }); - } + }; - bench( - isDevnet - ? 'should successfully execute a contract read function' - : 'should successfully execute a contract read function 10 times', - async () => { - if (isDevnet) { - const tx = await contract.functions.get_count().call(); + runBenchmark('should successfully execute a contract read function', async () => { + const tx = await contract.functions.get_count().call(); + const { value } = await tx.waitForResult(); + expect(value).toBeDefined(); + }); - const { value } = await tx.waitForResult(); + runBenchmark('should successfully execute a contract multi call', async () => { + const initialValue = 100; + const tx = await contract + .multiCall([ + contract.functions.increment_counter(initialValue), + contract.functions.get_count(), + ]) + .call(); + const { value } = await tx.waitForResult(); + expect(value).toBeDefined(); + }); - expect(value).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - const tx = await contract.functions.get_count().call(); + runBenchmark('should successfully write to a contract', async () => { + const tx = await contract.functions.increment_counter(100).call(); + await tx.waitForResult(); + }); - const { value } = await tx.waitForResult(); + runBenchmark('should successfully execute a contract mint', async () => { + const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); + await tx.waitForResult(); + }); - expect(JSON.stringify(value)).toEqual(JSON.stringify(bn(0))); - } - } - } - ); - - bench( - isDevnet - ? 
'should successfully execute a contract multi call' - : 'should successfully execute a contract multi call 10 times', - async () => { - const initialValue = 100; - if (isDevnet) { - const tx = await contract - .multiCall([ - contract.functions.increment_counter(initialValue), - contract.functions.get_count(), - ]) - .call(); - - const { value } = await tx.waitForResult(); - - expect(value).toBeDefined(); - } else { - for (let i = 1; i < 11; i++) { - const tx = await contract - .multiCall([contract.functions.increment_counter(100), contract.functions.get_count()]) - .call(); - - const { value } = await tx.waitForResult(); - - expect(JSON.stringify(value)).toEqual( - JSON.stringify([bn(initialValue * i), bn(initialValue * i)]) - ); - } - } - } - ); - - bench( - isDevnet - ? 'should successfully write to a contract' - : 'should successfully write to a contract 10 times', - async () => { - if (isDevnet) { - const tx = await contract.functions.increment_counter(100).call(); - await tx.waitForResult(); - } else { - for (let i = 0; i < 10; i++) { - const tx = await contract.functions.increment_counter(100).call(); - await tx.waitForResult(); - } - } - } - ); - - bench( - isDevnet - ? 'should successfully execute a contract mint' - : 'should successfully execute a contract mint 10 times', - async () => { - if (isDevnet) { - const tx = await callTestContract.functions.mint_coins(TestAssetId.A.value, bn(100)).call(); - await tx.waitForResult(); - } else { - for (let i = 0; i < 10; i++) { - const tx = await callTestContract.functions - .mint_coins(TestAssetId.A.value, bn(100)) - .call(); - await tx.waitForResult(); - } - } - } - ); - - bench( - isDevnet - ? 'should successfully execute a contract deploy' - : 'should successfully execute a contract deploy 10 times', - async () => { - if (isDevnet) { - const factory = new CounterContractFactory(wallet); - const { waitForResult } = await factory.deploy(); - const { contract: deployedContract } = await waitForResult(); - - expect(deployedContract).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - const factory = new CounterContractFactory(wallet); - const { waitForResult } = await factory.deploy(); - const { contract: deployedContract } = await waitForResult(); - - expect(deployedContract).toBeDefined(); - } - } - } - ); + runBenchmark('should successfully execute a contract deploy', async () => { + const factory = new CounterContractFactory(wallet); + const { waitForResult } = await factory.deploy(); + const { contract: deployedContract } = await waitForResult(); + expect(deployedContract).toBeDefined(); + }); bench('should successfully execute a contract deploy as blobs', async () => { const factory = new PythContractFactory(wallet); @@ -181,7 +109,6 @@ describe('Contract Interaction Benchmarks', () => { chunkSizeMultiplier: 0.9, }); const { contract: deployedContract } = await waitForResult(); - expect(deployedContract).toBeDefined(); }); }); diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index c563034329..2ab936c7e3 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -23,47 +23,22 @@ describe('Cost Estimation Benchmarks', () => { let receiver2: WalletUnlocked; let receiver3: WalletUnlocked; let sender: WalletUnlocked; - let cleanup: () => void; const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; + const iterations = isDevnet ? 
1 : 10; - const setup = (testProvider: Provider) => { - request = new ScriptTransactionRequest({ gasLimit: 1000000 }); - - recipient = Wallet.generate({ - provider: testProvider, - }); - receiver1 = Wallet.generate({ - provider: testProvider, - }); - receiver2 = Wallet.generate({ - provider: testProvider, - }); - receiver3 = Wallet.generate({ - provider: testProvider, - }); - sender = Wallet.fromPrivateKey( - '0x30bb0bc68f5d2ec3b523cee5a65503031b40679d9c72280cd8088c2cfbc34e38', - testProvider - ); - }; - - if (isDevnet) { - beforeAll(async () => { + const setupTestEnvironment = async () => { + if (isDevnet) { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); const wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); - setup(provider); - const contractFactory = new CallTestContractFactory(wallet); const { waitForResult } = await contractFactory.deploy(); const { contract: deployedContract } = await waitForResult(); contract = deployedContract; - }); - } else { - beforeEach(async () => { + } else { const launched = await launchTestNode({ contractsConfigs: [{ factory: CallTestContractFactory }], }); @@ -71,127 +46,88 @@ describe('Cost Estimation Benchmarks', () => { cleanup = launched.cleanup; contract = launched.contracts[0]; provider = contract.provider; + } - setup(provider); - }); + request = new ScriptTransactionRequest({ gasLimit: 1000000 }); + recipient = Wallet.generate({ provider }); + receiver1 = Wallet.generate({ provider }); + receiver2 = Wallet.generate({ provider }); + receiver3 = Wallet.generate({ provider }); + sender = Wallet.fromPrivateKey( + '0x30bb0bc68f5d2ec3b523cee5a65503031b40679d9c72280cd8088c2cfbc34e38', + provider + ); + }; - afterEach(() => { + beforeAll(setupTestEnvironment); + + afterAll(() => { + if (!isDevnet && cleanup) { cleanup(); - }); - } + } + }); - bench( - 'should successfully get transaction cost estimate for a single contract call done 10 times', - async () => { - if (isDevnet) { - const cost = await contract.functions - .return_context_amount() - .callParams({ - forward: [100, contract.provider.getBaseAssetId()], - }) - .getTransactionCost(); + const runBenchmark = (name: string, benchmarkFn: () => Promise) => { + bench(isDevnet ? 
name : `${name} (x${iterations} times)`, async () => { + for (let i = 0; i < iterations; i++) { + await benchmarkFn(); + } + }); + }; - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - const cost = await contract.functions - .return_context_amount() - .callParams({ - forward: [100, contract.provider.getBaseAssetId()], - }) - .getTransactionCost(); + const expectCostToBeDefined = (cost: any) => { + expect(cost.minFee).toBeDefined(); + expect(cost.maxFee).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + expect(cost.gasUsed).toBeDefined(); + expect(cost.gasPrice).toBeDefined(); + }; - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } - } + runBenchmark( + 'should successfully get transaction cost estimate for a single contract call', + async () => { + const cost = await contract.functions + .return_context_amount() + .callParams({ + forward: [100, contract.provider.getBaseAssetId()], + }) + .getTransactionCost(); + + expectCostToBeDefined(cost); } ); - bench( - isDevnet - ? 'should successfully get transaction cost estimate for multi contract calls' - : 'should successfully get transaction cost estimate for multi contract calls 10 times', + runBenchmark( + 'should successfully get transaction cost estimate for multi contract calls', async () => { - if (isDevnet) { - const invocationScope = contract.multiCall([ - contract.functions.return_context_amount().callParams({ - forward: [100, provider.getBaseAssetId()], - }), - contract.functions.return_context_amount().callParams({ - forward: [200, provider.getBaseAssetId()], - }), - ]); - - const cost = await invocationScope.getTransactionCost(); - - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - const invocationScope = contract.multiCall([ - contract.functions.return_context_amount().callParams({ - forward: [100, provider.getBaseAssetId()], - }), - ]); - - const cost = await invocationScope.getTransactionCost(); - - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } - } + const invocationScope = contract.multiCall([ + contract.functions.return_context_amount().callParams({ + forward: [100, provider.getBaseAssetId()], + }), + contract.functions.return_context_amount().callParams({ + forward: [200, provider.getBaseAssetId()], + }), + ]); + + const cost = await invocationScope.getTransactionCost(); + + expectCostToBeDefined(cost); } ); - bench( - isDevnet - ? 
'should successfully get transaction cost estimate for a single transfer' - : 'should successfully get transaction cost estimate for a single transfer 10 times', + runBenchmark( + 'should successfully get transaction cost estimate for a single transfer', async () => { - if (isDevnet) { - request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); - - const cost = await sender.getTransactionCost(request); - - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); + request.addCoinOutput(recipient.address, 10, provider.getBaseAssetId()); - const cost = await sender.getTransactionCost(request); + const cost = await sender.getTransactionCost(request); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } - } + expectCostToBeDefined(cost); } ); - bench( - isDevnet - ? 'should successfully get transaction cost estimate for a batch transfer' - : 'should successfully get transaction cost estimate for a batch transfer 10 times', + runBenchmark( + 'should successfully get transaction cost estimate for a batch transfer', async () => { const amountToTransfer1 = 989; const amountToTransfer2 = 699; @@ -213,66 +149,22 @@ describe('Cost Estimation Benchmarks', () => { assetId: provider.getBaseAssetId(), }, ]; - if (isDevnet) { - const cost = await contract.functions - .sum(40, 50) - .addBatchTransfer(transferParams) - .getTransactionCost(); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - const cost = await contract.functions - .sum(40, 50) - .addBatchTransfer(transferParams) - .getTransactionCost(); + const cost = await contract.functions + .sum(40, 50) + .addBatchTransfer(transferParams) + .getTransactionCost(); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } - } + expectCostToBeDefined(cost); } ); - bench( - isDevnet - ? 
'should successfully get transaction cost estimate for a mint' - : 'should successfully get transaction cost estimate for a mint 10 times', - async () => { - if (isDevnet) { - const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; - const amountToMint = 1_000; + runBenchmark('should successfully get transaction cost estimate for a mint', async () => { + const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; + const amountToMint = 1_000; - const cost = await contract.functions.mint_coins(subId, amountToMint).getTransactionCost(); + const cost = await contract.functions.mint_coins(subId, amountToMint).getTransactionCost(); - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - const subId = '0x4a778acfad1abc155a009dc976d2cf0db6197d3d360194d74b1fb92b96986b00'; - const amountToMint = 1_000; - - const cost = await contract.functions - .mint_coins(subId, amountToMint) - .getTransactionCost(); - - expect(cost.minFee).toBeDefined(); - expect(cost.maxFee).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - expect(cost.gasUsed).toBeDefined(); - expect(cost.gasPrice).toBeDefined(); - } - } - } - ); + expectCostToBeDefined(cost); + }); }); diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index 202206eecd..f17521a602 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -20,39 +20,44 @@ describe('Transaction Submission Benchmarks', () => { let cleanup: () => void; const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; + const iterations = isDevnet ? 1 : 10; - if (isDevnet) { - beforeAll(async () => { + const setupTestEnvironment = async () => { + if (isDevnet) { const { networkUrl } = DEVNET_CONFIG; provider = await Provider.create(networkUrl); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); - - receiver1 = Wallet.generate({ provider }); - receiver2 = Wallet.generate({ provider }); - receiver3 = Wallet.generate({ provider }); - }); - } else { - beforeEach(async () => { + } else { const launched = await launchTestNode(); - cleanup = launched.cleanup; provider = launched.provider; wallet = launched.wallets[1]; - receiver1 = Wallet.generate({ provider }); - receiver2 = Wallet.generate({ provider }); - receiver3 = Wallet.generate({ provider }); - }); + } + + receiver1 = Wallet.generate({ provider }); + receiver2 = Wallet.generate({ provider }); + receiver3 = Wallet.generate({ provider }); + }; - afterEach(() => { + beforeAll(setupTestEnvironment); + + afterAll(() => { + if (!isDevnet && cleanup) { cleanup(); + } + }); + + const runBenchmark = (name: string, benchmarkFn: () => Promise) => { + bench(isDevnet ? 
name : `${name} (x${iterations} times)`, async () => { + for (let i = 0; i < iterations; i++) { + await benchmarkFn(); + } }); - } + }; const transfer = async () => { const tx = await wallet.transfer(receiver1.address, 100, provider.getBaseAssetId()); - const { isStatusSuccess } = await tx.waitForResult(); - expect(isStatusSuccess).toBeTruthy(); }; @@ -62,50 +67,21 @@ describe('Transaction Submission Benchmarks', () => { witnessLimit: 800, maxFee: 70_000, }; - const pendingTx = await wallet.transfer( receiver1.address, 500, provider.getBaseAssetId(), txParams ); - const { transaction } = await pendingTx.waitForResult(); - expect(transaction).toBeDefined(); }; - bench( - isDevnet - ? 'should successfully transfer a single asset between wallets' - : 'should successfully transfer a single asset between wallets 10 times', - async () => { - if (isDevnet) { - await transfer(); - } else { - for (let i = 0; i < 10; i++) { - await transfer(); - } - } - } - ); - - bench( - isDevnet - ? 'should successfully conduct a custom transfer between wallets' - : 'should successfully conduct a custom transfer between wallets 10 times', - async () => { - if (isDevnet) { - await customTransfer(); - } else { - for (let i = 0; i < 10; i++) { - await customTransfer(); - } - } - } - ); + runBenchmark('should successfully transfer a single asset between wallets', transfer); + + runBenchmark('should successfully conduct a custom transfer between wallets', customTransfer); - bench('should successfully perform a batch transfer', async () => { + runBenchmark('should successfully perform a batch transfer', async () => { const amountToTransfer1 = 989; const amountToTransfer2 = 699; const amountToTransfer3 = 122; @@ -128,46 +104,18 @@ describe('Transaction Submission Benchmarks', () => { }, ]; - if (isDevnet) { - const tx = await wallet.batchTransfer(transferParams); - - const { isStatusSuccess } = await tx.waitForResult(); - - expect(isStatusSuccess).toBeTruthy(); - } else { - for (let i = 0; i < 10; i++) { - const tx = await wallet.batchTransfer(transferParams); - - const { isStatusSuccess } = await tx.waitForResult(); - - expect(isStatusSuccess).toBeTruthy(); - } - } + const tx = await wallet.batchTransfer(transferParams); + const { isStatusSuccess } = await tx.waitForResult(); + expect(isStatusSuccess).toBeTruthy(); }); - bench( - isDevnet - ? 
'should successfully withdraw to the base layer' - : 'should successfully withdraw to the base layer 10 times', - async () => { - const txParams = { - witnessLimit: 800, - maxFee: 100_000, - }; - - if (isDevnet) { - const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); - const { transaction } = await pendingTx.waitForResult(); - - expect(transaction).toBeDefined(); - } else { - for (let i = 0; i < 10; i++) { - const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); - const { transaction } = await pendingTx.waitForResult(); - - expect(transaction).toBeDefined(); - } - } - } - ); + runBenchmark('should successfully withdraw to the base layer', async () => { + const txParams = { + witnessLimit: 800, + maxFee: 100_000, + }; + const pendingTx = await wallet.withdrawToBaseLayer(receiver1.address, 500, txParams); + const { transaction } = await pendingTx.waitForResult(); + expect(transaction).toBeDefined(); + }); }); diff --git a/internal/benchmarks/src/wallet.bench.ts b/internal/benchmarks/src/wallet.bench.ts index 4b957c263e..b816be8439 100644 --- a/internal/benchmarks/src/wallet.bench.ts +++ b/internal/benchmarks/src/wallet.bench.ts @@ -20,46 +20,49 @@ describe('Wallet Benchmarks', () => { let cleanup: () => void; let provider: Provider; - if (process.env.DEVNET_WALLET_PVT_KEY !== undefined) { - beforeAll(async () => { + const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; + const iterations = isDevnet ? 1 : 10; + + const setupTestEnvironment = async () => { + if (isDevnet) { provider = await Provider.create(DEVNET_CONFIG.networkUrl); - }); - } else { - beforeEach(async () => { + } else { const launched = await launchTestNode(); - cleanup = launched.cleanup; provider = launched.provider; - }); - - afterEach(() => { - cleanup(); - }); - } + } + }; - bench('Instantiate a new Unlocked wallet 10 times', () => { - for (let i = 0; i < 10; i++) { - const unlockedWallet = new WalletUnlocked(expectedPrivateKey, provider); + beforeAll(setupTestEnvironment); - expect(unlockedWallet.publicKey).toEqual(expectedPublicKey); - expect(unlockedWallet.address.toAddress()).toEqual(expectedAddress); + afterAll(() => { + if (!isDevnet && cleanup) { + cleanup(); } }); - bench('Instantiate a new Locked wallet from a constructor 10 times', () => { - for (let i = 0; i < 10; i++) { - const lockedWallet = new WalletLocked(expectedPrivateKey, provider); + const runBenchmark = (name: string, benchmarkFn: () => void) => { + bench(isDevnet ? 
name : `${name} (x${iterations} times)`, () => { + for (let i = 0; i < iterations; i++) { + benchmarkFn(); + } + }); + }; - expect(lockedWallet.address.toAddress()).toEqual(expectedLockedAddress); - } + runBenchmark('Instantiate a new Unlocked wallet', () => { + const unlockedWallet = new WalletUnlocked(expectedPrivateKey, provider); + expect(unlockedWallet.publicKey).toEqual(expectedPublicKey); + expect(unlockedWallet.address.toAddress()).toEqual(expectedAddress); }); - bench('Instantiate from an address 10 times', () => { - for (let i = 0; i < 10; i++) { - const lockedWallet = Wallet.fromAddress(expectedAddress, provider); + runBenchmark('Instantiate a new Locked wallet from a constructor', () => { + const lockedWallet = new WalletLocked(expectedPrivateKey, provider); + expect(lockedWallet.address.toAddress()).toEqual(expectedLockedAddress); + }); - expect(lockedWallet.address.toAddress()).toEqual(expectedAddress); - expect(lockedWallet).toBeInstanceOf(WalletLocked); - } + runBenchmark('Instantiate from an address', () => { + const lockedWallet = Wallet.fromAddress(expectedAddress, provider); + expect(lockedWallet.address.toAddress()).toEqual(expectedAddress); + expect(lockedWallet).toBeInstanceOf(WalletLocked); }); }); From 26f53ef14ec18a5930434c6ccd25973edfd0054a Mon Sep 17 00:00:00 2001 From: chad Date: Wed, 18 Sep 2024 11:38:05 -0500 Subject: [PATCH 17/21] ci: update workflows --- .github/workflows/bench-devnet.yaml | 5 ----- .github/workflows/bench.yml | 5 ++--- internal/benchmarks/src/cost-estimation.bench.ts | 4 ++-- 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/.github/workflows/bench-devnet.yaml b/.github/workflows/bench-devnet.yaml index c035e382b4..4e2d87b7da 100644 --- a/.github/workflows/bench-devnet.yaml +++ b/.github/workflows/bench-devnet.yaml @@ -1,13 +1,8 @@ name: "Bench Devnet" on: - # Remove this before merging, we only want devnet tests on master - pull_request: - branches: - - master push: branches: - - master - release/* jobs: diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 96f6a93255..edd809059b 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -1,10 +1,9 @@ name: Benchmarks + on: pull_request: - branches: - - master push: - branches-ignore: + branches: - master jobs: diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index 2ab936c7e3..88ef70eb32 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -1,6 +1,6 @@ /* eslint-disable import/no-extraneous-dependencies */ -import type { TransferParams } from 'fuels'; +import type { TransferParams, TransactionCost } from 'fuels'; import { Wallet, Provider, ScriptTransactionRequest, WalletUnlocked } from 'fuels'; import { launchTestNode } from 'fuels/test-utils'; import { bench } from 'vitest'; @@ -75,7 +75,7 @@ describe('Cost Estimation Benchmarks', () => { }); }; - const expectCostToBeDefined = (cost: any) => { + const expectCostToBeDefined = (cost: TransactionCost) => { expect(cost.minFee).toBeDefined(); expect(cost.maxFee).toBeDefined(); expect(cost.gasPrice).toBeDefined(); From aea62fb1f569367cd39b4a6a8e1fa3434bcdf0f0 Mon Sep 17 00:00:00 2001 From: chad Date: Thu, 19 Sep 2024 09:54:15 -0500 Subject: [PATCH 18/21] chore: refactoring + cleanup --- internal/benchmarks/src/config.ts | 26 +- .../src/contract-interaction.bench.ts | 16 +- .../benchmarks/src/cost-estimation.bench.ts | 20 +- .../src/transaction-results.bench.ts 
| 20 +- internal/benchmarks/src/wallet.bench.ts | 22 +- .../test/fixtures/forc-projects/Forc.toml | 3 +- .../forc-projects/large-contract/Forc.toml | 7 + .../forc-projects/large-contract/src/main.sw | 14 + .../forc-projects/pyth-contract/Forc.toml | 10 - .../forc-projects/pyth-contract/src/main.sw | 934 ------------------ .../forc-projects/pyth-interface/Forc.toml | 8 - .../pyth-interface/src/data_structures.sw | 11 - .../src/data_structures/accumulator_update.sw | 136 --- .../batch_attestation_update.sw | 94 -- .../src/data_structures/data_source.sw | 39 - .../data_structures/governance_instruction.sw | 242 ----- .../src/data_structures/governance_payload.sw | 29 - .../src/data_structures/price.sw | 343 ------- .../src/data_structures/update_type.sw | 38 - .../src/data_structures/wormhole_light.sw | 589 ----------- .../pyth-interface/src/errors.sw | 72 -- .../pyth-interface/src/events.sw | 43 - .../pyth-interface/src/interface.sw | 323 ------ .../pyth-interface/src/pyth_merkle_proof.sw | 63 -- .../forc-projects/pyth-interface/src/utils.sw | 17 - 25 files changed, 53 insertions(+), 3066 deletions(-) create mode 100644 internal/benchmarks/test/fixtures/forc-projects/large-contract/Forc.toml create mode 100644 internal/benchmarks/test/fixtures/forc-projects/large-contract/src/main.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw delete mode 100644 internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw diff --git a/internal/benchmarks/src/config.ts b/internal/benchmarks/src/config.ts index d8c15e599f..512c66d155 100644 --- a/internal/benchmarks/src/config.ts +++ b/internal/benchmarks/src/config.ts @@ -1,11 +1,17 @@ -import { DEVNET_NETWORK_URL } from '@internal/utils'; -import { TransactionType } from 'fuels'; - -export const DEVNET_CONFIG = { - networkUrl: DEVNET_NETWORK_URL, - faucetUrl: 
`https://faucet-devnet.fuel.network/`, - txIds: { - [TransactionType.Upgrade]: '0xe2c03044fe708e9b112027881baf9f892e6b64a630a629998922c1cab918c094', - [TransactionType.Upload]: '0x94bc2a189b8211796c8fe5b9c6b67624fe97d2007e104bf1b30739944f43bd73', - }, +/* eslint-disable import/no-extraneous-dependencies */ + +import { bench } from 'vitest'; + +export const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; + +const iterations = isDevnet ? 1 : 10; + +export const runBenchmark = (name: string, benchmarkFn: () => Promise) => { + bench( + isDevnet ? name : `${name} (x${iterations} times)`, + async () => { + await benchmarkFn(); + }, + { iterations } + ); }; diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index cc456cdf27..27d3f796a8 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -1,5 +1,6 @@ /* eslint-disable import/no-extraneous-dependencies */ +import { DEVNET_NETWORK_URL } from '@internal/utils'; import { WalletUnlocked, bn, Provider } from 'fuels'; import { launchTestNode, TestAssetId } from 'fuels/test-utils'; import { bench } from 'vitest'; @@ -11,7 +12,7 @@ import { PythContractFactory, } from '../test/typegen/contracts'; -import { DEVNET_CONFIG } from './config'; +import { isDevnet, runBenchmark } from './config'; /** * @group node @@ -22,13 +23,10 @@ describe('Contract Interaction Benchmarks', () => { let callTestContract: CallTestContract; let wallet: WalletUnlocked; let cleanup: () => void; - const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; - const iterations = isDevnet ? 1 : 10; const setupTestEnvironment = async () => { if (isDevnet) { - const { networkUrl } = DEVNET_CONFIG; - const provider = await Provider.create(networkUrl); + const provider = await Provider.create(DEVNET_NETWORK_URL); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); const { waitForResult } = await CounterContractFactory.deploy(wallet); @@ -60,14 +58,6 @@ describe('Contract Interaction Benchmarks', () => { } }); - const runBenchmark = (name: string, benchmarkFn: () => Promise) => { - bench(isDevnet ? 
name : `${name} (x${iterations} times)`, async () => { - for (let i = 0; i < iterations; i++) { - await benchmarkFn(); - } - }); - }; - runBenchmark('should successfully execute a contract read function', async () => { const tx = await contract.functions.get_count().call(); const { value } = await tx.waitForResult(); diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index 88ef70eb32..dd50683c84 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -1,14 +1,12 @@ -/* eslint-disable import/no-extraneous-dependencies */ - +import { DEVNET_NETWORK_URL } from '@internal/utils'; import type { TransferParams, TransactionCost } from 'fuels'; import { Wallet, Provider, ScriptTransactionRequest, WalletUnlocked } from 'fuels'; import { launchTestNode } from 'fuels/test-utils'; -import { bench } from 'vitest'; import type { CallTestContract } from '../test/typegen/contracts'; import { CallTestContractFactory } from '../test/typegen/contracts'; -import { DEVNET_CONFIG } from './config'; +import { isDevnet, runBenchmark } from './config'; /** * @group node @@ -25,13 +23,9 @@ describe('Cost Estimation Benchmarks', () => { let sender: WalletUnlocked; let cleanup: () => void; - const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; - const iterations = isDevnet ? 1 : 10; - const setupTestEnvironment = async () => { if (isDevnet) { - const { networkUrl } = DEVNET_CONFIG; - provider = await Provider.create(networkUrl); + provider = await Provider.create(DEVNET_NETWORK_URL); const wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); const contractFactory = new CallTestContractFactory(wallet); @@ -67,14 +61,6 @@ describe('Cost Estimation Benchmarks', () => { } }); - const runBenchmark = (name: string, benchmarkFn: () => Promise) => { - bench(isDevnet ? name : `${name} (x${iterations} times)`, async () => { - for (let i = 0; i < iterations; i++) { - await benchmarkFn(); - } - }); - }; - const expectCostToBeDefined = (cost: TransactionCost) => { expect(cost.minFee).toBeDefined(); expect(cost.maxFee).toBeDefined(); diff --git a/internal/benchmarks/src/transaction-results.bench.ts b/internal/benchmarks/src/transaction-results.bench.ts index f17521a602..c1916826a8 100644 --- a/internal/benchmarks/src/transaction-results.bench.ts +++ b/internal/benchmarks/src/transaction-results.bench.ts @@ -1,11 +1,9 @@ -/* eslint-disable import/no-extraneous-dependencies */ - +import { DEVNET_NETWORK_URL } from '@internal/utils'; import type { TransferParams } from 'fuels'; import { Wallet, Provider, WalletUnlocked } from 'fuels'; import { launchTestNode } from 'fuels/test-utils'; -import { bench } from 'vitest'; -import { DEVNET_CONFIG } from './config'; +import { isDevnet, runBenchmark } from './config'; /** * @group node @@ -19,13 +17,9 @@ describe('Transaction Submission Benchmarks', () => { let receiver3: WalletUnlocked; let cleanup: () => void; - const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; - const iterations = isDevnet ? 
1 : 10; - const setupTestEnvironment = async () => { if (isDevnet) { - const { networkUrl } = DEVNET_CONFIG; - provider = await Provider.create(networkUrl); + provider = await Provider.create(DEVNET_NETWORK_URL); wallet = new WalletUnlocked(process.env.DEVNET_WALLET_PVT_KEY as string, provider); } else { const launched = await launchTestNode(); @@ -47,14 +41,6 @@ describe('Transaction Submission Benchmarks', () => { } }); - const runBenchmark = (name: string, benchmarkFn: () => Promise) => { - bench(isDevnet ? name : `${name} (x${iterations} times)`, async () => { - for (let i = 0; i < iterations; i++) { - await benchmarkFn(); - } - }); - }; - const transfer = async () => { const tx = await wallet.transfer(receiver1.address, 100, provider.getBaseAssetId()); const { isStatusSuccess } = await tx.waitForResult(); diff --git a/internal/benchmarks/src/wallet.bench.ts b/internal/benchmarks/src/wallet.bench.ts index b816be8439..123dc4d5a7 100644 --- a/internal/benchmarks/src/wallet.bench.ts +++ b/internal/benchmarks/src/wallet.bench.ts @@ -1,10 +1,11 @@ /* eslint-disable import/no-extraneous-dependencies */ +import { DEVNET_NETWORK_URL } from '@internal/utils'; import { Provider, WalletLocked, WalletUnlocked, Wallet } from 'fuels'; import { launchTestNode } from 'fuels/test-utils'; import { bench } from 'vitest'; -import { DEVNET_CONFIG } from './config'; +import { isDevnet } from './config'; const expectedPrivateKey = '0x5f70feeff1f229e4a95e1056e8b4d80d0b24b565674860cc213bdb07127ce1b1'; const expectedPublicKey = @@ -20,12 +21,9 @@ describe('Wallet Benchmarks', () => { let cleanup: () => void; let provider: Provider; - const isDevnet = process.env.DEVNET_WALLET_PVT_KEY !== undefined; - const iterations = isDevnet ? 1 : 10; - const setupTestEnvironment = async () => { if (isDevnet) { - provider = await Provider.create(DEVNET_CONFIG.networkUrl); + provider = await Provider.create(DEVNET_NETWORK_URL); } else { const launched = await launchTestNode(); cleanup = launched.cleanup; @@ -41,26 +39,18 @@ describe('Wallet Benchmarks', () => { } }); - const runBenchmark = (name: string, benchmarkFn: () => void) => { - bench(isDevnet ? 
name : `${name} (x${iterations} times)`, () => { - for (let i = 0; i < iterations; i++) { - benchmarkFn(); - } - }); - }; - - runBenchmark('Instantiate a new Unlocked wallet', () => { + bench('Instantiate a new Unlocked wallet', () => { const unlockedWallet = new WalletUnlocked(expectedPrivateKey, provider); expect(unlockedWallet.publicKey).toEqual(expectedPublicKey); expect(unlockedWallet.address.toAddress()).toEqual(expectedAddress); }); - runBenchmark('Instantiate a new Locked wallet from a constructor', () => { + bench('Instantiate a new Locked wallet from a constructor', () => { const lockedWallet = new WalletLocked(expectedPrivateKey, provider); expect(lockedWallet.address.toAddress()).toEqual(expectedLockedAddress); }); - runBenchmark('Instantiate from an address', () => { + bench('Instantiate from an address', () => { const lockedWallet = Wallet.fromAddress(expectedAddress, provider); expect(lockedWallet.address.toAddress()).toEqual(expectedAddress); expect(lockedWallet).toBeInstanceOf(WalletLocked); diff --git a/internal/benchmarks/test/fixtures/forc-projects/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/Forc.toml index 33f7786179..a88486f141 100644 --- a/internal/benchmarks/test/fixtures/forc-projects/Forc.toml +++ b/internal/benchmarks/test/fixtures/forc-projects/Forc.toml @@ -2,6 +2,5 @@ members = [ "call-test-contract", "counter-contract", - "pyth-contract", - "pyth-interface", + "large-contract", ] diff --git a/internal/benchmarks/test/fixtures/forc-projects/large-contract/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/large-contract/Forc.toml new file mode 100644 index 0000000000..09e85255d6 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/large-contract/Forc.toml @@ -0,0 +1,7 @@ +[project] +authors = ["Fuel Labs "] +entry = "main.sw" +license = "Apache-2.0" +name = "large-contract" + +[dependencies] diff --git a/internal/benchmarks/test/fixtures/forc-projects/large-contract/src/main.sw b/internal/benchmarks/test/fixtures/forc-projects/large-contract/src/main.sw new file mode 100644 index 0000000000..3dc4af3d05 --- /dev/null +++ b/internal/benchmarks/test/fixtures/forc-projects/large-contract/src/main.sw @@ -0,0 +1,14 @@ +contract; + +abi MyContract { + fn something() -> u64; +} + +impl MyContract for Contract { + fn something() -> u64 { + asm() { + blob i450000; + } + 1001 + } +} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml deleted file mode 100644 index 01cffae12f..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/Forc.toml +++ /dev/null @@ -1,10 +0,0 @@ -[project] -authors = ["Fuel Labs "] -entry = "main.sw" -license = "Apache-2.0" -name = "pyth-contract" - -[dependencies] -pyth_interface = { path = "../pyth-interface" } -standards = { git = "https://github.com/FuelLabs/sway-standards", tag = "v0.4.4" } -sway_libs = { git = "https://github.com/FuelLabs/sway-libs", tag = "v0.21.0" } diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw deleted file mode 100644 index 8df0945616..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-contract/src/main.sw +++ /dev/null @@ -1,934 +0,0 @@ -contract; - -use std::{ - asset_id::AssetId, - block::timestamp, - bytes::Bytes, - call_frames::msg_asset_id, - constants::{ - ZERO_B256, - }, - context::msg_amount, - hash::{ - Hash, 
- keccak256, - sha256, - }, - revert::revert, - storage::{ - storage_map::StorageMap, - storage_vec::*, - }, -}; - -use pyth_interface::{ - data_structures::{ - batch_attestation_update::*, - data_source::*, - governance_instruction::*, - governance_payload::*, - price::*, - update_type::UpdateType, - wormhole_light::*, - }, - errors::{ - PythError, - WormholeError, - }, - events::{ - ConstructedEvent, - ContractUpgradedEvent, - DataSourcesSetEvent, - FeeSetEvent, - GovernanceDataSourceSetEvent, - NewGuardianSetEvent, - UpdatedPriceFeedsEvent, - ValidPeriodSetEvent, - }, - pyth_merkle_proof::validate_proof, - PythCore, - PythInfo, - PythInit, - utils::total_fee, - WormholeGuardians, -}; - -use sway_libs::ownership::*; -use standards::src5::{SRC5, State}; - -const GUARDIAN_SET_EXPIRATION_TIME_SECONDS: u64 = 86400; // 24 hours in seconds -configurable { - DEPLOYER: Identity = Identity::Address(Address::from(ZERO_B256)), -} - -storage { - // | | - // --+-- PYTH STATE --+-- - // | | - // (chainId, emitterAddress) => isValid; takes advantage of - // constant-time mapping lookup for VM verification - is_valid_data_source: StorageMap = StorageMap {}, - // Mapping of cached price information - // priceId => PriceInfo - latest_price_feed: StorageMap = StorageMap {}, - // Fee required for each update - single_update_fee: u64 = 0, - // For tracking all active emitter/chain ID pairs - valid_data_sources: StorageVec = StorageVec {}, - /// Maximum acceptable time period before price is considered to be stale. - /// This includes attestation delay, block time, and potential clock drift - /// between the source/target chains. - valid_time_period_seconds: u64 = 0, - /// Governance data source. VAA messages from this source can change this contract - /// state. e.g., upgrade the contract, change the valid data sources, and more. - governance_data_source: DataSource = DataSource { - chain_id: 0u16, - emitter_address: ZERO_B256, - }, - /// Index of the governance data source, increased each time the governance data source changes. - governance_data_source_index: u32 = 0, - /// Sequence number of the last executed governance message. Any governance message - /// with a lower or equal sequence number will be discarded. This prevents double-execution, - /// and also makes sure that messages are executed in the right order. 
- last_executed_governance_sequence: u64 = 0, - /// Chain ID of the contract - chain_id: u16 = 0, - /// | | - /// --+-- WORMHOLE STATE --+-- - /// | | - /// Mapping of consumed governance actions - wormhole_consumed_governance_actions: StorageMap = StorageMap {}, - /// Mapping of guardian_set_index => guardian set - wormhole_guardian_sets: StorageMap = StorageMap {}, - /// Current active guardian set index - wormhole_guardian_set_index: u32 = 0, - /// Using Ethereum's Wormhole governance - wormhole_governance_data_source: DataSource = DataSource { - chain_id: 0u16, - emitter_address: ZERO_B256, - }, - /// | | - /// --+-- GOVERNANCE STATE --+-- - /// | | - current_implementation: Identity = Identity::Address(Address::from(ZERO_B256)), -} - -impl SRC5 for Contract { - #[storage(read)] - fn owner() -> State { - _owner() - } -} - -impl PythCore for Contract { - #[storage(read)] - fn ema_price(price_feed_id: PriceFeedId) -> Price { - ema_price_no_older_than(valid_time_period(), price_feed_id) - } - - #[storage(read)] - fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { - ema_price_no_older_than(time_period, price_feed_id) - } - - #[storage(read)] - fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price { - ema_price_unsafe(price_feed_id) - } - - #[storage(read), payable] - fn parse_price_feed_updates( - max_publish_time: u64, - min_publish_time: u64, - target_price_feed_ids: Vec, - update_data: Vec, - ) -> Vec { - require( - msg_asset_id() == AssetId::base(), - PythError::FeesCanOnlyBePaidInTheBaseAsset, - ); - - let required_fee = update_fee(update_data); - require(msg_amount() >= required_fee, PythError::InsufficientFee); - - let mut output_price_feeds: Vec = Vec::with_capacity(target_price_feed_ids.len()); - let mut i = 0; - while i < update_data.len() { - let data = update_data.get(i).unwrap(); - - match UpdateType::determine_type(data) { - UpdateType::Accumulator(accumulator_update) => { - let (mut offset, digest, number_of_updates, encoded) = accumulator_update.verify_and_parse( - current_guardian_set_index(), - storage - .wormhole_guardian_sets, - storage - .is_valid_data_source, - ); - let mut i_2 = 0; - while i_2 < number_of_updates { - let (new_offset, price_feed) = PriceFeed::extract_from_merkle_proof(digest, encoded, offset); - - offset = new_offset; - - if price_feed.id.is_target(target_price_feed_ids) == false { - i_2 += 1; - continue; - } - - if price_feed.price.publish_time >= min_publish_time && price_feed.price.publish_time <= max_publish_time { - // check if output_price_feeds already contains a PriceFeed with price_feed.id, if so continue as we only want 1 - // output PriceFeed per target ID - if price_feed.id.is_contained_within(output_price_feeds) { - i_2 += 1; - continue; - } - - output_price_feeds.push(price_feed) - } - - i_2 += 1; - } - require(offset == encoded.len(), PythError::InvalidUpdateDataLength); - }, - UpdateType::BatchAttestation(batch_attestation_update) => { - let vm = WormholeVM::parse_and_verify_pyth_vm( - current_guardian_set_index(), - batch_attestation_update - .data, - storage - .wormhole_guardian_sets, - storage - .is_valid_data_source, - ); - - let (mut attestation_index, number_of_attestations, attestation_size) = parse_and_verify_batch_attestation_header(vm.payload); - let attestation_size_u16 = attestation_size.as_u64(); - - let mut i_2: u16 = 0; - while i_2 < number_of_attestations { - let (_, slice) = vm.payload.split_at(attestation_index + 32); - let (price_feed_id, _) = slice.split_at(32); - let 
price_feed_id: PriceFeedId = price_feed_id.into(); - - if price_feed_id.is_target(target_price_feed_ids) == false { - attestation_index += attestation_size_u16; - i_2 += 1; - continue; - } - - let price_feed = PriceFeed::parse_attestation(attestation_size, vm.payload, attestation_index); - - if price_feed.price.publish_time >= min_publish_time && price_feed.price.publish_time <= max_publish_time { - // check if output_price_feeds already contains a PriceFeed with price_feed.id, if so continue; - // as we only want 1 output PriceFeed per target ID - if price_feed.id.is_contained_within(output_price_feeds) { - attestation_index += attestation_size_u16; - i_2 += 1; - continue; - } - - output_price_feeds.push(price_feed) - } - - attestation_index += attestation_size_u16; - i_2 += 1; - } - } - } - - i += 1; - } - - require( - target_price_feed_ids - .len() == output_price_feeds - .len(), - PythError::PriceFeedNotFoundWithinRange, - ); - - output_price_feeds - } - - #[storage(read)] - fn price(price_feed_id: PriceFeedId) -> Price { - price_no_older_than(valid_time_period(), price_feed_id) - } - - #[storage(read)] - fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { - price_no_older_than(time_period, price_feed_id) - } - - #[storage(read)] - fn price_unsafe(price_feed_id: PriceFeedId) -> Price { - price_unsafe(price_feed_id) - } - - #[storage(read)] - fn update_fee(update_data: Vec) -> u64 { - update_fee(update_data) - } - - #[storage(read, write), payable] - fn update_price_feeds(update_data: Vec) { - update_price_feeds(update_data) - } - - #[storage(read, write), payable] - fn update_price_feeds_if_necessary( - price_feed_ids: Vec, - publish_times: Vec, - update_data: Vec, - ) { - require( - price_feed_ids - .len() == publish_times - .len(), - PythError::LengthOfPriceFeedIdsAndPublishTimesMustMatch, - ); - - let mut i = 0; - while i < price_feed_ids.len() { - if latest_publish_time(price_feed_ids.get(i).unwrap()) < publish_times.get(i).unwrap() - { - update_price_feeds(update_data); - return; - } - - i += 1; - } - } - - #[storage(read)] - fn valid_time_period() -> u64 { - valid_time_period() - } -} - -/// PythCore Private Functions /// -#[storage(read)] -fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { - let price = ema_price_unsafe(price_feed_id); - let current_time = timestamp(); - require( - current_time - price.publish_time <= time_period, - PythError::OutdatedPrice, - ); - - price -} - -#[storage(read)] -fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price { - let price_feed = storage.latest_price_feed.get(price_feed_id).try_read(); - require(price_feed.is_some(), PythError::PriceFeedNotFound); - - price_feed.unwrap().ema_price -} - -#[storage(read)] -fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price { - let price = price_unsafe(price_feed_id); - let current_time = timestamp(); - require( - current_time - price.publish_time <= time_period, - PythError::OutdatedPrice, - ); - - price -} - -#[storage(read)] -fn price_unsafe(price_feed_id: PriceFeedId) -> Price { - let price_feed = storage.latest_price_feed.get(price_feed_id).try_read(); - require(price_feed.is_some(), PythError::PriceFeedNotFound); - - price_feed.unwrap().price -} - -#[storage(read)] -fn update_fee(update_data: Vec) -> u64 { - let mut total_number_of_updates = 0; - let mut i = 0; - while i < update_data.len() { - let data = update_data.get(i).unwrap(); - - match UpdateType::determine_type(data) { - 
UpdateType::Accumulator(accumulator_update) => { - let proof_size_offset = accumulator_update.verify(); - - total_number_of_updates += accumulator_update.total_updates(proof_size_offset); - }, - UpdateType::BatchAttestation => { - total_number_of_updates += 1; - }, - } - - i += 1; - } - - total_fee(total_number_of_updates, storage.single_update_fee) -} - -#[storage(read, write), payable] -fn update_price_feeds(update_data: Vec) { - require( - msg_asset_id() == AssetId::base(), - PythError::FeesCanOnlyBePaidInTheBaseAsset, - ); - - let mut total_number_of_updates = 0; - - // let mut updated_price_feeds: Vec = Vec::new(); // TODO: requires append for Vec - let mut i = 0; - while i < update_data.len() { - let data = update_data.get(i).unwrap(); - - match UpdateType::determine_type(data) { - UpdateType::Accumulator(accumulator_update) => { - let (number_of_updates, _updated_ids) = accumulator_update.update_price_feeds( - current_guardian_set_index(), - storage - .wormhole_guardian_sets, - storage - .latest_price_feed, - storage - .is_valid_data_source, - ); - // updated_price_feeds.append(updated_ids); // TODO: requires append for Vec - total_number_of_updates += number_of_updates; - }, - UpdateType::BatchAttestation(batch_attestation_update) => { - let _updated_ids = batch_attestation_update.update_price_feeds( - current_guardian_set_index(), - storage - .wormhole_guardian_sets, - storage - .latest_price_feed, - storage - .is_valid_data_source, - ); - // updated_price_feeds.append(updated_ids); // TODO: requires append for Vec - total_number_of_updates += 1; - }, - } - - i += 1; - } - - let required_fee = total_fee(total_number_of_updates, storage.single_update_fee); - require(msg_amount() >= required_fee, PythError::InsufficientFee); - - // log(UpdatedPriceFeedsEvent { // TODO: requires append for Vec - // updated_price_feeds, - // }) -} - -#[storage(read)] -fn valid_time_period() -> u64 { - storage.valid_time_period_seconds.read() -} - -#[storage(read)] -fn governance_data_source() -> DataSource { - storage.governance_data_source.read() -} - -#[storage(write)] -fn set_governance_data_source(data_source: DataSource) { - storage.governance_data_source.write(data_source); -} - -#[storage(read)] -fn governance_data_source_index() -> u32 { - storage.governance_data_source_index.read() -} - -#[storage(write)] -fn set_governance_data_source_index(index: u32) { - storage.governance_data_source_index.write(index); -} - -#[storage(read)] -fn last_executed_governance_sequence() -> u64 { - storage.last_executed_governance_sequence.read() -} - -#[storage(write)] -fn set_last_executed_governance_sequence(sequence: u64) { - storage.last_executed_governance_sequence.write(sequence); -} - -#[storage(read)] -fn chain_id() -> u16 { - storage.chain_id.read() -} - -#[storage(read)] -fn current_implementation() -> Identity { - storage.current_implementation.read() -} - -impl PythInit for Contract { - #[storage(read, write)] - fn constructor( - data_sources: Vec, - governance_data_source: DataSource, - wormhole_governance_data_source: DataSource, - single_update_fee: u64, - valid_time_period_seconds: u64, - wormhole_guardian_set_addresses: Vec, - wormhole_guardian_set_index: u32, - chain_id: u16, - ) { - // This function sets the passed identity as the initial owner. https://github.com/FuelLabs/sway-libs/blob/8045a19e3297599750abdf6300c11e9927a29d40/libs/src/ownership.sw#L127-L138 - initialize_ownership(DEPLOYER); - // This function ensures that the sender is the owner. 
https://github.com/FuelLabs/sway-libs/blob/8045a19e3297599750abdf6300c11e9927a29d40/libs/src/ownership.sw#L59-L65 - only_owner(); - - require(data_sources.len() > 0, PythError::InvalidDataSourcesLength); - - let mut i = 0; - while i < data_sources.len() { - let data_source = data_sources.get(i).unwrap(); - storage.is_valid_data_source.insert(data_source, true); - storage.valid_data_sources.push(data_source); - - i += 1; - } - storage - .latest_price_feed - .write(StorageMap:: {}); - - storage - .valid_time_period_seconds - .write(valid_time_period_seconds); - storage.single_update_fee.write(single_update_fee); - - let guardian_length: u8 = wormhole_guardian_set_addresses.len().try_as_u8().unwrap(); - let mut new_guardian_set = StorageGuardianSet::new( - 0, - StorageKey::>::new( - sha256(("guardian_set_keys", wormhole_guardian_set_index)), - 0, - ZERO_B256, - ), - ); - let mut i: u8 = 0; - while i < guardian_length { - let key: b256 = wormhole_guardian_set_addresses.get(i.as_u64()).unwrap(); - new_guardian_set.keys.push(key); - i += 1; - } - - storage - .wormhole_guardian_set_index - .write(wormhole_guardian_set_index); - storage - .wormhole_guardian_sets - .insert(wormhole_guardian_set_index, new_guardian_set); - - storage.governance_data_source.write(governance_data_source); - storage - .wormhole_governance_data_source - .write(wormhole_governance_data_source); - storage.governance_data_source_index.write(0); - storage - .wormhole_consumed_governance_actions - .write(StorageMap:: {}); - storage.chain_id.write(chain_id); - storage.last_executed_governance_sequence.write(0); - - storage - .current_implementation - .write(Identity::Address(Address::from(ZERO_B256))); - - // This function revokes ownership of the current owner and disallows any new owners. 
https://github.com/FuelLabs/sway-libs/blob/8045a19e3297599750abdf6300c11e9927a29d40/libs/src/ownership.sw#L89-L99 - renounce_ownership(); - - log(ConstructedEvent { - guardian_set_index: wormhole_guardian_set_index, - }) - } -} - -impl PythInfo for Contract { - #[storage(read)] - fn valid_data_sources() -> Vec { - storage.valid_data_sources.load_vec() - } - - #[storage(read)] - fn latest_publish_time(price_feed_id: PriceFeedId) -> u64 { - latest_publish_time(price_feed_id) - } - - #[storage(read)] - fn price_feed_exists(price_feed_id: PriceFeedId) -> bool { - match storage.latest_price_feed.get(price_feed_id).try_read() { - Some(_) => true, - None => false, - } - } - - #[storage(read)] - fn price_feed_unsafe(price_feed_id: PriceFeedId) -> PriceFeed { - let price_feed = storage.latest_price_feed.get(price_feed_id).try_read(); - require(price_feed.is_some(), PythError::PriceFeedNotFound); - price_feed.unwrap() - } - - #[storage(read)] - fn single_update_fee() -> u64 { - storage.single_update_fee.read() - } - - #[storage(read)] - fn is_valid_data_source(data_source: DataSource) -> bool { - data_source.is_valid_data_source(storage.is_valid_data_source) - } - - #[storage(read)] - fn last_executed_governance_sequence() -> u64 { - last_executed_governance_sequence() - } - - #[storage(read)] - fn chain_id() -> u16 { - chain_id() - } -} - -/// PythInfo Private Functions /// -#[storage(read)] -fn latest_publish_time(price_feed_id: PriceFeedId) -> u64 { - match storage.latest_price_feed.get(price_feed_id).try_read() { - Some(price_feed) => price_feed.price.publish_time, - None => 0, - } -} - -impl WormholeGuardians for Contract { - #[storage(read)] - fn current_guardian_set_index() -> u32 { - current_guardian_set_index() - } - - #[storage(read)] - fn current_wormhole_provider() -> DataSource { - current_wormhole_provider() - } - - #[storage(read)] - fn guardian_set(index: u32) -> GuardianSet { - let stored_guardian_set = storage.wormhole_guardian_sets.get(index).try_read(); - require( - stored_guardian_set - .is_some(), - PythError::GuardianSetNotFound, - ); - GuardianSet::from_stored(stored_guardian_set.unwrap()) - } - - #[storage(read)] - fn governance_action_is_consumed(governance_action_hash: b256) -> bool { - governance_action_is_consumed(governance_action_hash) - } - - #[storage(read, write)] - fn submit_new_guardian_set(encoded_vm: Bytes) { - submit_new_guardian_set(encoded_vm) - } -} - -/// WormholeGuardians Private Functions /// -#[storage(read)] -fn current_guardian_set_index() -> u32 { - storage.wormhole_guardian_set_index.read() -} - -#[storage(read)] -fn current_wormhole_provider() -> DataSource { - storage.wormhole_governance_data_source.read() -} - -#[storage(read)] -fn governance_action_is_consumed(governance_action_hash: b256) -> bool { - match storage.wormhole_consumed_governance_actions.get(governance_action_hash).try_read() { - Some(bool_) => bool_, - None => false, - } -} - -#[storage(read, write)] -fn submit_new_guardian_set(encoded_vm: Bytes) { - let vm: WormholeVM = WormholeVM::parse_and_verify_wormhole_vm( - current_guardian_set_index(), - encoded_vm, - storage - .wormhole_guardian_sets, - ); - require( - vm.guardian_set_index == current_guardian_set_index(), - WormholeError::NotSignedByCurrentGuardianSet, - ); - let current_wormhole_provider: DataSource = current_wormhole_provider(); - require( - vm.emitter_chain_id == current_wormhole_provider - .chain_id, - WormholeError::InvalidGovernanceChain, - ); - require( - vm.emitter_address == current_wormhole_provider - 
.emitter_address, - WormholeError::InvalidGovernanceContract, - ); - require( - governance_action_is_consumed(vm.governance_action_hash) == false, - WormholeError::GovernanceActionAlreadyConsumed, - ); - - let current_guardian_set_index: u32 = current_guardian_set_index(); - let upgrade: GuardianSetUpgrade = GuardianSetUpgrade::parse_encoded_upgrade(current_guardian_set_index, vm.payload); - - storage - .wormhole_consumed_governance_actions - .insert(vm.governance_action_hash, true); - - // Set expiry if current GuardianSet exists - let current_guardian_set = storage.wormhole_guardian_sets.get(current_guardian_set_index).try_read(); - if current_guardian_set.is_some() { - let mut current_guardian_set = current_guardian_set.unwrap(); - current_guardian_set.expiration_time = timestamp() + GUARDIAN_SET_EXPIRATION_TIME_SECONDS; - storage - .wormhole_guardian_sets - .insert(current_guardian_set_index, current_guardian_set); - } - - storage - .wormhole_guardian_sets - .insert(upgrade.new_guardian_set_index, upgrade.new_guardian_set); - storage - .wormhole_guardian_set_index - .write(upgrade.new_guardian_set_index); - - log(NewGuardianSetEvent { - governance_action_hash: vm.governance_action_hash, - new_guardian_set_index: upgrade.new_guardian_set_index, - }) -} - -/// Transfer the governance data source to a new value with sanity checks to ensure the new governance data source can manage the contract. -#[storage(read, write)] -fn authorize_governance_data_source_transfer( - payload: AuthorizeGovernanceDataSourceTransferPayload, -) { - let old_governance_data_source = governance_data_source(); - - // Parse and verify the VAA contained in the payload to ensure it's valid and can manage the contract - let vm: WormholeVM = WormholeVM::parse_and_verify_wormhole_vm( - current_guardian_set_index(), - payload - .claim_vaa, - storage - .wormhole_guardian_sets, - ); - - let gi = GovernanceInstruction::parse_governance_instruction(vm.payload); - require( - gi.target_chain_id == chain_id() || gi.target_chain_id == 0, - PythError::InvalidGovernanceTarget, - ); - - require( - match gi.action { - GovernanceAction::RequestGovernanceDataSourceTransfer => true, - _ => false, - }, - PythError::InvalidGovernanceMessage, - ); - - let claim_payload = GovernanceInstruction::parse_request_governance_data_source_transfer_payload(gi.payload); - - require( - governance_data_source_index() < claim_payload - .governance_data_source_index, - PythError::OldGovernanceMessage, - ); - - set_governance_data_source_index(claim_payload.governance_data_source_index); - - let new_governance_data_source = DataSource { - chain_id: vm.emitter_chain_id, - emitter_address: vm.emitter_address, - }; - - set_governance_data_source(new_governance_data_source); - - // Setting the last executed governance to the claimVaa sequence to avoid using older sequences. 
- set_last_executed_governance_sequence(vm.sequence); - - log(GovernanceDataSourceSetEvent { - old_data_source: old_governance_data_source, - new_data_source: new_governance_data_source, - initial_sequence: vm.sequence, - }); -} - -#[storage(read, write)] -fn set_data_sources(payload: SetDataSourcesPayload) { - let old_data_sources = storage.valid_data_sources.load_vec(); - - let mut i = 0; - while i < old_data_sources.len() { - let data_source = old_data_sources.get(i).unwrap(); - storage.is_valid_data_source.insert(data_source, false); - i += 1; - } - - // Clear the current list of valid data sources - storage.valid_data_sources.clear(); - - i = 0; - // Add new data sources from the payload and mark them as valid - while i < payload.data_sources.len() { - let data_source = payload.data_sources.get(i).unwrap(); - storage.valid_data_sources.push(data_source); - storage.is_valid_data_source.insert(data_source, true); - - i += 1; - } - - // Emit an event with the old and new data sources - log(DataSourcesSetEvent { - old_data_sources: old_data_sources, - new_data_sources: storage.valid_data_sources.load_vec(), - }); -} - -#[storage(read, write)] -fn set_fee(payload: SetFeePayload) { - let old_fee = storage.single_update_fee.read(); - storage.single_update_fee.write(payload.new_fee); - - log(FeeSetEvent { - old_fee, - new_fee: payload.new_fee, - }); -} - -#[storage(read, write)] -fn set_valid_period(payload: SetValidPeriodPayload) { - let old_valid_period = storage.valid_time_period_seconds.read(); - storage - .valid_time_period_seconds - .write(payload.new_valid_period); - - log(ValidPeriodSetEvent { - old_valid_period, - new_valid_period: payload.new_valid_period, - }); -} - -abi PythGovernance { - #[storage(read)] - fn governance_data_source() -> DataSource; - - #[storage(read, write)] - fn execute_governance_instruction(encoded_vm: Bytes); -} - -impl PythGovernance for Contract { - #[storage(read)] - fn governance_data_source() -> DataSource { - governance_data_source() - } - - #[storage(read, write)] - fn execute_governance_instruction(encoded_vm: Bytes) { - execute_governance_instruction(encoded_vm) - } -} - -#[storage(read, write)] -fn execute_governance_instruction(encoded_vm: Bytes) { - let vm = verify_governance_vm(encoded_vm); - // Log so that the WormholeVM struct will show up in the ABI and can be used in the tests - log(vm); - - let gi = GovernanceInstruction::parse_governance_instruction(vm.payload); - // Log so that the GovernanceInstruction struct will show up in the ABI and can be used in the tests - log(gi); - - require( - gi.target_chain_id == chain_id() || gi.target_chain_id == 0, - PythError::InvalidGovernanceTarget, - ); - - match gi.action { - GovernanceAction::UpgradeContract => { - require(gi.target_chain_id != 0, PythError::InvalidGovernanceTarget); - // TODO: implement upgrade_upgradeable_contract(uc) when Fuel releases the upgrade standard library; - log("Upgrade functionality not implemented"); - revert(0u64); - }, - GovernanceAction::AuthorizeGovernanceDataSourceTransfer => { - let agdst = GovernanceInstruction::parse_authorize_governance_data_source_transfer_payload(gi.payload); - log(agdst); - authorize_governance_data_source_transfer(agdst); - }, - GovernanceAction::SetDataSources => { - let sdsp = GovernanceInstruction::parse_set_data_sources_payload(gi.payload); - log(sdsp); - set_data_sources(sdsp); - }, - GovernanceAction::SetFee => { - let sf = GovernanceInstruction::parse_set_fee_payload(gi.payload); - log(sf); - set_fee(sf); - }, - 
GovernanceAction::SetValidPeriod => { - let svp = GovernanceInstruction::parse_set_valid_period_payload(gi.payload); - log(svp); - set_valid_period(svp); - }, - GovernanceAction::RequestGovernanceDataSourceTransfer => { - // RequestGovernanceDataSourceTransfer can be only part of AuthorizeGovernanceDataSourceTransfer message - // The `revert` function only accepts u64, so as - // a workaround we use require. - require(false, PythError::InvalidGovernanceMessage); - }, - _ => { - // The `revert` function only accepts u64, so as - // a workaround we use require. - require(false, PythError::InvalidGovernanceMessage); - } - } -} - -#[storage(read, write)] -fn verify_governance_vm(encoded_vm: Bytes) -> WormholeVM { - let vm: WormholeVM = WormholeVM::parse_and_verify_wormhole_vm( - current_guardian_set_index(), - encoded_vm, - storage - .wormhole_guardian_sets, - ); - - require( - storage - .governance_data_source - .read() - .is_valid_governance_data_source(vm.emitter_chain_id, vm.emitter_address), - PythError::InvalidGovernanceDataSource, - ); - - require( - vm.sequence > last_executed_governance_sequence(), - PythError::OldGovernanceMessage, - ); - - set_last_executed_governance_sequence(vm.sequence); - vm -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml deleted file mode 100644 index 7cb44a4edc..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/Forc.toml +++ /dev/null @@ -1,8 +0,0 @@ -[project] -authors = ["Fuel Labs "] -entry = "interface.sw" -license = "Apache-2.0" -name = "pyth_interface" - -[dependencies] -standards = { git = "https://github.com/FuelLabs/sway-standards", tag = "v0.4.4" } diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw deleted file mode 100644 index 4e7d6ddb91..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures.sw +++ /dev/null @@ -1,11 +0,0 @@ -library; - -// The order of the modules is important because of the dependencies between them. 
-pub mod data_source; -pub mod wormhole_light; -pub mod price; -pub mod accumulator_update; -pub mod batch_attestation_update; -pub mod governance_payload; -pub mod governance_instruction; -pub mod update_type; diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw deleted file mode 100644 index 014cee8975..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/accumulator_update.sw +++ /dev/null @@ -1,136 +0,0 @@ -library; - -use ::errors::PythError; -use ::data_structures::{data_source::*, price::*, wormhole_light::{StorageGuardianSet, WormholeVM}}; -use std::{bytes::Bytes, hash::Hash}; - -pub struct AccumulatorUpdate { - data: Bytes, -} -const MINIMUM_ALLOWED_MINOR_VERSION = 0; -const MAJOR_VERSION = 1; -impl AccumulatorUpdate { - pub fn new(data: Bytes) -> Self { - Self { data } - } - pub fn total_updates(self, ref mut offset: u64) -> u64 { - let proof_size = u16::from_be_bytes([self.data.get(offset).unwrap(), self.data.get(offset + 1).unwrap()]).as_u64(); - offset += proof_size + 2; - self.data.get(offset).unwrap().as_u64() - } - pub fn verify(self) -> u64 { - // skip magic as already checked when this is called - let major_version = self.data.get(4); - require( - major_version - .is_some() && major_version - .unwrap() == MAJOR_VERSION, - PythError::InvalidMajorVersion, - ); - let minor_version = self.data.get(5); - require( - minor_version - .is_some() && minor_version - .unwrap() >= MINIMUM_ALLOWED_MINOR_VERSION, - PythError::InvalidMinorVersion, - ); - let trailing_header_size = self.data.get(6); - require(trailing_header_size.is_some(), PythError::InvalidHeaderSize); - // skip trailing headers and update type - let offset = 8 + trailing_header_size.unwrap().as_u64(); - require( - self.data - .len() >= offset, - PythError::InvalidUpdateDataLength, - ); - offset - } -} -impl AccumulatorUpdate { - #[storage(read)] - pub fn verify_and_parse( - self, - current_guardian_set_index: u32, - wormhole_guardian_sets: StorageKey>, - is_valid_data_source: StorageKey>, -) -> (u64, Bytes, u64, Bytes) { - let encoded_offset = self.verify(); - let (_, slice) = self.data.split_at(encoded_offset); - let (encoded_slice, _) = slice.split_at(self.data.len() - encoded_offset); - let mut offset = 0; - let wormhole_proof_size = u16::from_be_bytes([encoded_slice.get(offset).unwrap(), encoded_slice.get(offset + 1).unwrap()]).as_u64(); - offset += 2; - let (_, slice) = encoded_slice.split_at(offset); - let (encoded_vm, _) = slice.split_at(wormhole_proof_size); - let vm = WormholeVM::parse_and_verify_pyth_vm( - current_guardian_set_index, - encoded_vm, - wormhole_guardian_sets, - is_valid_data_source, - ); - offset += wormhole_proof_size; - let encoded_payload = vm.payload; - /* - Payload offset: - skip magic (4 bytes) as already checked when this is called - skip update_type as (1 byte) it can only be WormholeMerkle - skip slot (8 bytes) as unused - skip ring_size (4 bytes) as unused - */ - let mut payload_offset = 17; - let (_, slice) = encoded_payload.split_at(payload_offset); - let (digest, _) = slice.split_at(20); - payload_offset += 20; - require( - payload_offset <= encoded_payload - .len(), - PythError::InvalidPayloadLength, - ); - let number_of_updates = encoded_slice.get(offset); - require( - number_of_updates - .is_some(), - PythError::NumberOfUpdatesIrretrievable, - ); - offset += 1; - 
(offset, digest, number_of_updates.unwrap().as_u64(), encoded_slice) - } -} -impl AccumulatorUpdate { - #[storage(read, write)] - pub fn update_price_feeds( - self, - current_guardian_set_index: u32, - wormhole_guardian_sets: StorageKey>, - latest_price_feed: StorageKey>, - is_valid_data_source: StorageKey>, -) -> (u64, Vec) { - let (mut offset, digest, number_of_updates, encoded_data) = self.verify_and_parse( - current_guardian_set_index, - wormhole_guardian_sets, - is_valid_data_source, - ); - - let mut updated_ids = Vec::new(); - let mut i = 0; - while i < number_of_updates { - let (new_offset, price_feed) = PriceFeed::extract_from_merkle_proof(digest, encoded_data, offset); - offset = new_offset; - let latest_publish_time = match latest_price_feed.get(price_feed.id).try_read() { - Some(price_feed) => price_feed.price.publish_time, - None => 0, - }; - if price_feed.price.publish_time > latest_publish_time { - latest_price_feed.insert(price_feed.id, price_feed); - updated_ids.push(price_feed.id); - } - i += 1; - } - require( - offset == encoded_data - .len(), - PythError::InvalidUpdateDataLength, - ); - (number_of_updates, updated_ids) - } -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw deleted file mode 100644 index f1ef0a21b8..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/batch_attestation_update.sw +++ /dev/null @@ -1,94 +0,0 @@ -library; - -use ::errors::PythError; -use ::data_structures::{data_source::*, price::*, wormhole_light::{StorageGuardianSet, WormholeVM}}; -use std::{bytes::Bytes, hash::Hash}; - -const BATCH_MAGIC: u32 = 0x50325748; - -pub struct BatchAttestationUpdate { - pub data: Bytes, -} -impl BatchAttestationUpdate { - pub fn new(data: Bytes) -> Self { - Self { data } - } - #[storage(read, write)] - pub fn update_price_feeds( - self, - current_guardian_set_index: u32, - wormhole_guardian_sets: StorageKey>, - latest_price_feed: StorageKey>, - is_valid_data_source: StorageKey>, -) -> Vec { - let vm = WormholeVM::parse_and_verify_pyth_vm( - current_guardian_set_index, - self.data, - wormhole_guardian_sets, - is_valid_data_source, - ); - let (mut attestation_index, number_of_attestations, attestation_size) = parse_and_verify_batch_attestation_header(vm.payload); - let mut updated_ids = Vec::new(); - let mut i: u16 = 0; - while i < number_of_attestations { - let price_feed = PriceFeed::parse_attestation(attestation_size, vm.payload, attestation_index); - // Respect specified attestation size for forward-compatibility - attestation_index += attestation_size.as_u64(); - let latest_publish_time = match latest_price_feed.get(price_feed.id).try_read() { - Some(price_feed) => price_feed.price.publish_time, - None => 0, - }; - if price_feed.price.publish_time > latest_publish_time { - latest_price_feed.insert(price_feed.id, price_feed); - updated_ids.push(price_feed.id); - } - i += 1; - } - updated_ids - } -} -pub fn parse_and_verify_batch_attestation_header(encoded_payload: Bytes) -> (u64, u16, u16) { - let mut index = 0; - //Check header - let magic = u32::from_be_bytes([ - encoded_payload.get(index).unwrap(), - encoded_payload.get(index + 1).unwrap(), - encoded_payload.get(index + 2).unwrap(), - encoded_payload.get(index + 3).unwrap(), - ]); - require(magic == BATCH_MAGIC, PythError::InvalidMagic); - index += 4; - let major_version = 
u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); - require(major_version == 3, PythError::InvalidMajorVersion); - // addtionally skip minor_version(2 bytes) as unused - index += 4; - let header_size = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); - index += 2; - // From solidity impl: - // NOTE(2022-04-19): Currently, only payloadId comes after - // hdrSize. Future extra header fields must be read using a - // separate offset to respect hdrSize, i.e.: - // uint hdrIndex = 0; - // bpa.header.payloadId = UnsafeBytesLib.toUint8(encoded, index + hdrIndex); - // hdrIndex += 1; - // bpa.header.someNewField = UnsafeBytesLib.toUint32(encoded, index + hdrIndex); - // hdrIndex += 4; - // Skip remaining unknown header bytes - // index += bpa.header.hdrSize; - let payload_id = encoded_payload.get(index).unwrap(); - // Payload ID of 2 required for batch header - require(payload_id == 2, PythError::InvalidPayloadId); - // Skip remaining unknown header bytes - index += header_size.as_u64(); - let number_of_attestations = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); - index += 2; - let attestation_size = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]); - index += 2; - require( - encoded_payload - .len() == index + (attestation_size * number_of_attestations) - .as_u64(), - PythError::InvalidPayloadLength, - ); - return (index, number_of_attestations, attestation_size); -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw deleted file mode 100644 index 2cb65cd0b1..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/data_source.sw +++ /dev/null @@ -1,39 +0,0 @@ -library; - -use std::hash::{Hash, Hasher}; - -pub struct DataSource { - pub chain_id: u16, - pub emitter_address: b256, -} - -impl Hash for DataSource { - fn hash(self, ref mut state: Hasher) { - self.chain_id.hash(state); - self.emitter_address.hash(state); - } -} - -impl DataSource { - pub fn new(chain_id: u16, emitter_address: b256) -> Self { - Self { - chain_id, - emitter_address, - } - } - - #[storage(read)] - pub fn is_valid_data_source( - self, - is_valid_data_source: StorageKey>, -) -> bool { - match is_valid_data_source.get(self).try_read() { - Some(bool) => bool, - None => false, - } - } - - pub fn is_valid_governance_data_source(self, chain_id: u16, emitter_address: b256) -> bool { - self.chain_id == chain_id && self.emitter_address == emitter_address - } -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw deleted file mode 100644 index 645b440bed..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_instruction.sw +++ /dev/null @@ -1,242 +0,0 @@ -library; - -use ::errors::PythError; -use ::data_structures::{ - data_source::*, - governance_payload::*, - price::*, - wormhole_light::{ - StorageGuardianSet, - WormholeVM, - }, -}; -use std::{bytes::Bytes, hash::Hash}; -use std::math::*; -use std::primitive_conversions::{u32::*, u64::*}; - -pub const MAGIC: u32 = 0x5054474d; - -pub struct 
GovernanceInstruction { - pub magic: u32, - pub module: GovernanceModule, - pub action: GovernanceAction, - pub target_chain_id: u16, - pub payload: Bytes, -} - -pub enum GovernanceModule { - Executor: (), // 0 - Target: (), // 1 - EvmExecutor: (), // 2 - StacksTarget: (), // 3 - Invalid: (), -} - -pub enum GovernanceAction { - UpgradeContract: (), // 0 - AuthorizeGovernanceDataSourceTransfer: (), // 1 - SetDataSources: (), // 2 - SetFee: (), // 3 - SetValidPeriod: (), // 4 - RequestGovernanceDataSourceTransfer: (), // 5 - Invalid: (), -} - -impl GovernanceInstruction { - pub fn new( - magic: u32, - module: GovernanceModule, - action: GovernanceAction, - target_chain_id: u16, - payload: Bytes, - ) -> Self { - Self { - magic, - module, - action, - target_chain_id, - payload, - } - } - - pub fn parse_governance_instruction(encoded_instruction: Bytes) -> Self { - let mut index = 0; - let magic = u32::from_be_bytes([ - encoded_instruction.get(index).unwrap(), - encoded_instruction.get(index + 1).unwrap(), - encoded_instruction.get(index + 2).unwrap(), - encoded_instruction.get(index + 3).unwrap(), - ]); - require(magic == MAGIC, PythError::InvalidMagic); - index += 4; - - let mod_number = encoded_instruction.get(index).unwrap(); - let module = match mod_number { - 0 => GovernanceModule::Executor, - 1 => GovernanceModule::Target, - 2 => GovernanceModule::EvmExecutor, - 3 => GovernanceModule::StacksTarget, - _ => GovernanceModule::Invalid, - }; - require( - match module { - GovernanceModule::Target => true, - _ => false, - }, - PythError::InvalidGovernanceTarget, - ); - index += 1; - - let action_number = encoded_instruction.get(index).unwrap(); - let governance_action = match action_number { - 0 => GovernanceAction::UpgradeContract, // Not implemented - 1 => GovernanceAction::AuthorizeGovernanceDataSourceTransfer, - 2 => GovernanceAction::SetDataSources, - 3 => GovernanceAction::SetFee, - 4 => GovernanceAction::SetValidPeriod, - 5 => GovernanceAction::RequestGovernanceDataSourceTransfer, - _ => GovernanceAction::Invalid, - }; - require( - match governance_action { - GovernanceAction::Invalid => false, - _ => true, - }, - PythError::InvalidGovernanceAction, - ); - index += 1; - - let target_chain_id = u16::from_be_bytes([ - encoded_instruction.get(index).unwrap(), - encoded_instruction.get(index + 1).unwrap(), - ]); - index += 2; - - let (_, payload) = encoded_instruction.split_at(index); - - GovernanceInstruction::new(magic, module, governance_action, target_chain_id, payload) - } - - /// Parse an AuthorizeGovernanceDataSourceTransferPayload (action 2) with minimal validation - pub fn parse_authorize_governance_data_source_transfer_payload( - encoded_payload: Bytes, - ) -> AuthorizeGovernanceDataSourceTransferPayload { - AuthorizeGovernanceDataSourceTransferPayload { - claim_vaa: encoded_payload, - } - } - - pub fn parse_request_governance_data_source_transfer_payload( - encoded_payload: Bytes, - ) -> RequestGovernanceDataSourceTransferPayload { - let mut index = 0; - let governance_data_source_index = u32::from_be_bytes([ - encoded_payload.get(index).unwrap(), - encoded_payload.get(index + 1).unwrap(), - encoded_payload.get(index + 2).unwrap(), - encoded_payload.get(index + 3).unwrap(), - ]); - index += 4; - require( - index == encoded_payload - .len(), - PythError::InvalidGovernanceMessage, - ); - let rdgst = RequestGovernanceDataSourceTransferPayload { - governance_data_source_index, - }; - rdgst - } - - pub fn parse_set_data_sources_payload(encoded_payload: Bytes) -> SetDataSourcesPayload { 
- let mut index = 0; - let data_sources_length = encoded_payload.get(index).unwrap().as_u64(); - index += 1; - let mut data_sources = Vec::with_capacity(data_sources_length); - - let mut i = 0; - while i < data_sources_length { - let (_, slice) = encoded_payload.split_at(index); - let (slice, _) = slice.split_at(2); - let chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]); - index += 2; - let (_, slice) = encoded_payload.split_at(index); - let (slice, _) = slice.split_at(32); - let emitter_address: b256 = slice.into(); - index += 32; - - data_sources.push(DataSource { - chain_id, - emitter_address, - }); - i += 1 - } - - require( - index == encoded_payload - .len(), - PythError::InvalidGovernanceMessage, - ); - let sds = SetDataSourcesPayload { data_sources }; - sds - } - - pub fn parse_set_fee_payload(encoded_payload: Bytes) -> SetFeePayload { - let mut index = 0; - let val = u64::from_be_bytes([ - encoded_payload.get(index).unwrap(), - encoded_payload.get(index + 1).unwrap(), - encoded_payload.get(index + 2).unwrap(), - encoded_payload.get(index + 3).unwrap(), - encoded_payload.get(index + 4).unwrap(), - encoded_payload.get(index + 5).unwrap(), - encoded_payload.get(index + 6).unwrap(), - encoded_payload.get(index + 7).unwrap(), - ]); - index += 8; - let expo = u64::from_be_bytes([ - encoded_payload.get(index).unwrap(), - encoded_payload.get(index + 1).unwrap(), - encoded_payload.get(index + 2).unwrap(), - encoded_payload.get(index + 3).unwrap(), - encoded_payload.get(index + 4).unwrap(), - encoded_payload.get(index + 5).unwrap(), - encoded_payload.get(index + 6).unwrap(), - encoded_payload.get(index + 7).unwrap(), - ]); - index += 8; - require( - encoded_payload - .len() == index, - PythError::InvalidGovernanceMessage, - ); - let sf = SetFeePayload { - new_fee: val * 10u64.pow(expo.try_as_u32().unwrap()), - }; - sf - } - - pub fn parse_set_valid_period_payload(encoded_payload: Bytes) -> SetValidPeriodPayload { - let mut index = 0; - let valid_time_period_seconds = u64::from_be_bytes([ - encoded_payload.get(index).unwrap(), - encoded_payload.get(index + 1).unwrap(), - encoded_payload.get(index + 2).unwrap(), - encoded_payload.get(index + 3).unwrap(), - encoded_payload.get(index + 4).unwrap(), - encoded_payload.get(index + 5).unwrap(), - encoded_payload.get(index + 6).unwrap(), - encoded_payload.get(index + 7).unwrap(), - ]); - index += 8; - require( - index == encoded_payload - .len(), - PythError::InvalidGovernanceMessage, - ); - let svp = SetValidPeriodPayload { - new_valid_period: valid_time_period_seconds, - }; - svp - } -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw deleted file mode 100644 index c00838a915..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/governance_payload.sw +++ /dev/null @@ -1,29 +0,0 @@ -library; - -use std::bytes::Bytes; - -use ::data_structures::data_source::DataSource; - -pub struct UpgradeContractPayload { - pub new_implementation: Identity, -} - -pub struct AuthorizeGovernanceDataSourceTransferPayload { - pub claim_vaa: Bytes, -} - -pub struct RequestGovernanceDataSourceTransferPayload { - pub governance_data_source_index: u32, -} - -pub struct SetDataSourcesPayload { - pub data_sources: Vec, -} - -pub struct SetFeePayload { - pub new_fee: u64, -} - -pub struct SetValidPeriodPayload { - pub 
new_valid_period: u64,
-}
diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw
deleted file mode 100644
index d3d619d751..0000000000
--- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/price.sw
+++ /dev/null
@@ -1,343 +0,0 @@
-library;
-
-use std::{block::timestamp, bytes::Bytes};
-
-use ::errors::PythError;
-use ::utils::absolute_of_exponent;
-use ::data_structures::wormhole_light::WormholeVM;
-use ::pyth_merkle_proof::validate_proof;
-const TAI64_DIFFERENCE = 4611686018427387904;
-// A price with a degree of uncertainty, represented as a price +- a confidence interval.
-//
-// The confidence interval roughly corresponds to the standard error of a normal distribution.
-// Both the price and confidence are stored in a fixed-point numeric representation,
-// `x * (10^expo)`, where `expo` is the exponent.
-//
-// Please refer to the documentation at https://docs.pyth.network/documentation/pythnet-price-feeds/best-practices for how
-// to use this price safely.
-pub struct Price {
- // Confidence interval around the price
- pub confidence: u64,
- // Price exponent
- // This value represents the absolute value of an i32 in the range -255 to 0. Values other than 0 should be considered negative:
- // exponent of 5 means the Pyth Price exponent was -5
- pub exponent: u32,
- // Price
- pub price: u64,
- // The TAI64 timestamp describing when the price was published
- pub publish_time: u64,
-}
-impl Price {
- pub fn new(
- confidence: u64,
- exponent: u32,
- price: u64,
- publish_time: u64,
- ) -> Self {
- Self {
- confidence,
- exponent,
- price,
- publish_time,
- }
- }
-}
-// The `PriceFeedId` type is an alias for `b256` that represents the id for a specific Pyth price feed.
-pub type PriceFeedId = b256;
-// PriceFeed represents a current aggregate price from Pyth publisher feeds.
-pub struct PriceFeed {
- // Latest available exponentially-weighted moving average price
- pub ema_price: Price,
- // The price ID.
- pub id: PriceFeedId, - // Latest available price - pub price: Price, -} -impl PriceFeedId { - pub fn is_target(self, target_price_feed_ids: Vec) -> bool { - let mut i = 0; - while i < target_price_feed_ids.len() { - if target_price_feed_ids.get(i).unwrap() == self { - return true; - } - i += 1; - } - false - } - pub fn is_contained_within(self, output_price_feeds: Vec) -> bool { - let mut i = 0; - while i < output_price_feeds.len() { - if output_price_feeds.get(i).unwrap().id == self { - return true; - } - i += 1; - } - false - } -} -impl PriceFeed { - pub fn new(ema_price: Price, id: PriceFeedId, price: Price) -> Self { - Self { - ema_price, - id, - price, - } - } -} -impl PriceFeed { - pub fn parse_message(encoded_price_feed: Bytes) -> Self { - let mut offset = 1u64; - let (_, slice) = encoded_price_feed.split_at(offset); - let (price_feed_id, _) = slice.split_at(32); - let price_feed_id: PriceFeedId = price_feed_id.into(); - offset += 32; - let price = u64::from_be_bytes([ - encoded_price_feed.get(offset).unwrap(), - encoded_price_feed.get(offset + 1).unwrap(), - encoded_price_feed.get(offset + 2).unwrap(), - encoded_price_feed.get(offset + 3).unwrap(), - encoded_price_feed.get(offset + 4).unwrap(), - encoded_price_feed.get(offset + 5).unwrap(), - encoded_price_feed.get(offset + 6).unwrap(), - encoded_price_feed.get(offset + 7).unwrap(), - ]); - offset += 8; - let confidence = u64::from_be_bytes([ - encoded_price_feed.get(offset).unwrap(), - encoded_price_feed.get(offset + 1).unwrap(), - encoded_price_feed.get(offset + 2).unwrap(), - encoded_price_feed.get(offset + 3).unwrap(), - encoded_price_feed.get(offset + 4).unwrap(), - encoded_price_feed.get(offset + 5).unwrap(), - encoded_price_feed.get(offset + 6).unwrap(), - encoded_price_feed.get(offset + 7).unwrap(), - ]); - offset += 8; - // exponent is an i32, expected to be in the range -255 to 0 - let exponent = u32::from_be_bytes([ - encoded_price_feed.get(offset).unwrap(), - encoded_price_feed.get(offset + 1).unwrap(), - encoded_price_feed.get(offset + 2).unwrap(), - encoded_price_feed.get(offset + 3).unwrap(), - ]); - let exponent = absolute_of_exponent(exponent); - require(exponent < 256u32, PythError::InvalidExponent); - offset += 4; - let mut publish_time = u64::from_be_bytes([ - encoded_price_feed.get(offset).unwrap(), - encoded_price_feed.get(offset + 1).unwrap(), - encoded_price_feed.get(offset + 2).unwrap(), - encoded_price_feed.get(offset + 3).unwrap(), - encoded_price_feed.get(offset + 4).unwrap(), - encoded_price_feed.get(offset + 5).unwrap(), - encoded_price_feed.get(offset + 6).unwrap(), - encoded_price_feed.get(offset + 7).unwrap(), - ]); - // skip unused previous_publish_times (8 bytes) - offset += 16; - let ema_price = u64::from_be_bytes([ - encoded_price_feed.get(offset).unwrap(), - encoded_price_feed.get(offset + 1).unwrap(), - encoded_price_feed.get(offset + 2).unwrap(), - encoded_price_feed.get(offset + 3).unwrap(), - encoded_price_feed.get(offset + 4).unwrap(), - encoded_price_feed.get(offset + 5).unwrap(), - encoded_price_feed.get(offset + 6).unwrap(), - encoded_price_feed.get(offset + 7).unwrap(), - ]); - offset += 8; - let ema_confidence = u64::from_be_bytes([ - encoded_price_feed.get(offset).unwrap(), - encoded_price_feed.get(offset + 1).unwrap(), - encoded_price_feed.get(offset + 2).unwrap(), - encoded_price_feed.get(offset + 3).unwrap(), - encoded_price_feed.get(offset + 4).unwrap(), - encoded_price_feed.get(offset + 5).unwrap(), - encoded_price_feed.get(offset + 6).unwrap(), - encoded_price_feed.get(offset 
+ 7).unwrap(), - ]); - offset += 8; - require( - offset <= encoded_price_feed - .len(), - PythError::InvalidPriceFeedDataLength, - ); - //convert publish_time from UNIX to TAI64 - publish_time += TAI64_DIFFERENCE; - require( - publish_time <= timestamp(), - PythError::FuturePriceNotAllowed, - ); - PriceFeed::new( - Price::new(ema_confidence, exponent, ema_price, publish_time), - price_feed_id, - Price::new(confidence, exponent, price, publish_time), - ) - } - pub fn parse_attestation(attestation_size: u16, encoded_payload: Bytes, index: u64) -> Self { - // Skip product id (32 bytes) as unused - let mut attestation_index = index + 32; - let (_, slice) = encoded_payload.split_at(attestation_index); - let (price_feed_id, _) = slice.split_at(32); - let price_feed_id: PriceFeedId = price_feed_id.into(); - attestation_index += 32; - let mut price = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - let mut confidence = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - // exponent is an i32, expected to be in the range -255 to 0 - let exponent = u32::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - ]); - let exponent = absolute_of_exponent(exponent); - require(exponent < 256u32, PythError::InvalidExponent); - attestation_index += 4; - let ema_price = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - let ema_confidence = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - // Status is an enum (encoded as u8) with the following values: - // 0 = UNKNOWN: The price feed is not currently updating for an unknown reason. - // 1 = TRADING: The price feed is updating as expected. 
- // 2 = HALTED: The price feed is not currently updating because trading in the product has been halted. - // 3 = AUCTION: The price feed is not currently updating because an auction is setting the price. - let status = encoded_payload.get(attestation_index).unwrap(); - // Additionally skip number_of publishers (8 bytes) and attestation_time (8 bytes); as unused - attestation_index += 17; - let mut publish_time = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - if status == 1u8 { - attestation_index += 24; - } else { - // If status is not trading then the latest available price is - // the previous price that is parsed here. - - // previous publish time - publish_time = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - // previous price - price = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - // previous confidence - confidence = u64::from_be_bytes([ - encoded_payload.get(attestation_index).unwrap(), - encoded_payload.get(attestation_index + 1).unwrap(), - encoded_payload.get(attestation_index + 2).unwrap(), - encoded_payload.get(attestation_index + 3).unwrap(), - encoded_payload.get(attestation_index + 4).unwrap(), - encoded_payload.get(attestation_index + 5).unwrap(), - encoded_payload.get(attestation_index + 6).unwrap(), - encoded_payload.get(attestation_index + 7).unwrap(), - ]); - attestation_index += 8; - } - require( - (attestation_index - index) <= attestation_size - .as_u64(), - PythError::InvalidAttestationSize, - ); - //convert publish_time from UNIX to TAI64 - publish_time += TAI64_DIFFERENCE; - PriceFeed::new( - Price::new(ema_confidence, exponent, ema_price, publish_time), - price_feed_id, - Price::new(confidence, exponent, price, publish_time), - ) - } -} -impl PriceFeed { - pub fn extract_from_merkle_proof(digest: Bytes, encoded_proof: Bytes, offset: u64) -> (u64, self) { - // In order to avoid `ref mut` param related MemoryWriteOverlap error - let mut current_offset = offset; - let message_size = u16::from_be_bytes([ - encoded_proof.get(current_offset).unwrap(), - encoded_proof.get(current_offset + 1).unwrap(), - ]).as_u64(); - current_offset += 2; - let (_, slice) = encoded_proof.split_at(current_offset); - let (encoded_message, _) = slice.split_at(message_size); - 
current_offset += message_size; - let end_offset = validate_proof( - encoded_proof, - current_offset, - digest, - encoded_message - .clone(), - ); - // Message type of 0 is a Price Feed - require( - encoded_message - .get(0) - .unwrap() == 0, - PythError::IncorrectMessageType, - ); - let price_feed = PriceFeed::parse_message(encoded_message); - (end_offset, price_feed) - } -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw deleted file mode 100644 index bf3706275c..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/update_type.sw +++ /dev/null @@ -1,38 +0,0 @@ -library; - -use std::{array_conversions::u32::*, bytes::Bytes}; - -use ::data_structures::{ - accumulator_update::AccumulatorUpdate, - batch_attestation_update::BatchAttestationUpdate, -}; - -const ACCUMULATOR_MAGIC: u32 = 0x504e4155; - -pub enum UpdateType { - Accumulator: AccumulatorUpdate, - BatchAttestation: BatchAttestationUpdate, -} - -impl UpdateType { - pub fn determine_type(data: Bytes) -> Self { - let (magic, _) = data.split_at(4); //TODO: Convert to u32 for comparison with const ACCUMULATOR_MAGIC. Use raw_ptr.read::()? Remove accumulator_magic_bytes() - if data.len() > 4 && magic == accumulator_magic_bytes() { - UpdateType::Accumulator(AccumulatorUpdate::new(data)) - } else { - UpdateType::BatchAttestation((BatchAttestationUpdate::new(data))) - } - } -} - -pub fn accumulator_magic_bytes() -> Bytes { - let accumulator_magic_array = ACCUMULATOR_MAGIC.to_be_bytes(); - - let mut accumulator_magic_bytes = Bytes::with_capacity(4); - accumulator_magic_bytes.push(accumulator_magic_array[0]); - accumulator_magic_bytes.push(accumulator_magic_array[1]); - accumulator_magic_bytes.push(accumulator_magic_array[2]); - accumulator_magic_bytes.push(accumulator_magic_array[3]); - - accumulator_magic_bytes -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw deleted file mode 100644 index 848ff66f35..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/data_structures/wormhole_light.sw +++ /dev/null @@ -1,589 +0,0 @@ -library; - -use ::data_structures::data_source::*; -use ::errors::WormholeError; - -use std::{ - array_conversions::{ - b256::*, - u16::*, - u32::*, - }, - b512::B512, - block::timestamp, - bytes::Bytes, - constants::ZERO_B256, - hash::{ - Hash, - keccak256, - sha256, - }, - storage::storage_vec::*, - vm::evm::ecr::ec_recover_evm_address, -}; - -pub const UPGRADE_MODULE: b256 = 0x00000000000000000000000000000000000000000000000000000000436f7265; - -pub struct GuardianSet { - pub expiration_time: u64, - pub keys: Vec, -} - -impl GuardianSet { - #[storage(read)] - pub fn from_stored(stored: StorageGuardianSet) -> Self { - Self { - expiration_time: stored.expiration_time, - keys: stored.keys.load_vec(), - } - } -} - -pub struct StorageGuardianSet { - pub expiration_time: u64, - pub keys: StorageKey>, -} - -impl StorageGuardianSet { - pub fn new(expiration_time: u64, keys: StorageKey>) -> Self { - StorageGuardianSet { - expiration_time, - keys, - } - } -} - -pub struct GuardianSetUpgrade { - pub action: u8, - pub chain: u16, - pub module: b256, - pub new_guardian_set: StorageGuardianSet, - pub new_guardian_set_index: u32, -} - 
-impl GuardianSetUpgrade { - pub fn new( - action: u8, - chain: u16, - module: b256, - new_guardian_set: StorageGuardianSet, - new_guardian_set_index: u32, - ) -> Self { - GuardianSetUpgrade { - action, - chain, - module, - new_guardian_set, - new_guardian_set_index, - } - } -} - -impl GuardianSetUpgrade { - #[storage(read, write)] - pub fn parse_encoded_upgrade(current_guardian_set_index: u32, encoded_upgrade: Bytes) -> Self { - let mut index = 0; - let (_, slice) = encoded_upgrade.split_at(index); - let (module, _) = slice.split_at(32); - let module: b256 = module.into(); - require(module == UPGRADE_MODULE, WormholeError::InvalidModule); - index += 32; - let action = encoded_upgrade.get(index).unwrap(); - require(action == 2, WormholeError::InvalidGovernanceAction); - index += 1; - let chain = u16::from_be_bytes([encoded_upgrade.get(index).unwrap(), encoded_upgrade.get(index + 1).unwrap()]); - index += 2; - let new_guardian_set_index = u32::from_be_bytes([ - encoded_upgrade.get(index).unwrap(), - encoded_upgrade.get(index + 1).unwrap(), - encoded_upgrade.get(index + 2).unwrap(), - encoded_upgrade.get(index + 3).unwrap(), - ]); - require( - new_guardian_set_index > current_guardian_set_index, - WormholeError::NewGuardianSetIndexIsInvalid, - ); - index += 4; - let guardian_length = encoded_upgrade.get(index).unwrap(); - index += 1; - let mut new_guardian_set: StorageGuardianSet = StorageGuardianSet::new( - 0, - StorageKey::>::new( - ZERO_B256, - 0, - sha256(("guardian_set_keys", new_guardian_set_index)), - ), - ); - let mut i: u8 = 0; - while i < guardian_length { - let (_, slice) = encoded_upgrade.split_at(index); - let (key, _) = slice.split_at(20); - let key: b256 = key.into(); - new_guardian_set.keys.push(key.rsh(96)); - index += 20; - i += 1; - } - require( - new_guardian_set - .keys - .len() == guardian_length - .as_u64(), - WormholeError::GuardianSetKeysLengthNotEqual, - ); - require( - encoded_upgrade - .len() == index, - WormholeError::InvalidGuardianSetUpgradeLength, - ); - GuardianSetUpgrade::new( - action, - chain, - module, - new_guardian_set, - new_guardian_set_index, - ) - } -} - -pub struct GuardianSignature { - guardian_index: u8, - r: b256, - s: b256, - v: u8, -} - -impl GuardianSignature { - pub fn new(guardian_index: u8, r: b256, s: b256, v: u8) -> Self { - GuardianSignature { - guardian_index, - r, - s, - v, - } - } - // eip-2098: Compact Signature Representation - pub fn compact(self) -> B512 { - let y_parity = b256::from_be_bytes([ - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - 0u8, - self.v - 27u8, - ]); - let shifted_y_parity = y_parity.lsh(255); - let y_parity_and_s = b256::binary_or(shifted_y_parity, self.s); - B512::from((self.r, y_parity_and_s)) - } -} - -impl GuardianSignature { - pub fn verify( - self, - guardian_set_key: b256, - hash: b256, - index: u64, - last_index: u64, -) { - // Ensure that provided signature indices are ascending only - if index > 0 { - require( - self.guardian_index - .as_u64() > last_index, - WormholeError::SignatureIndicesNotAscending, - ); - } - let recovered_signer = ec_recover_evm_address(self.compact(), hash); - require( - recovered_signer - .is_ok() && recovered_signer - .unwrap() - .bits() == guardian_set_key, - WormholeError::SignatureInvalid, - ); - } -} - -pub struct WormholeVM { - pub version: u8, - pub guardian_set_index: u32, - pub governance_action_hash: 
b256, - // signatures: Vec, // Shown here to represent data layout of VM, but not needed - pub timestamp: u32, - pub nonce: u32, - pub emitter_chain_id: u16, - pub emitter_address: b256, - pub sequence: u64, - pub consistency_level: u8, - pub payload: Bytes, -} - -impl WormholeVM { - pub fn default() -> Self { - WormholeVM { - version: 0u8, - guardian_set_index: 0u32, - governance_action_hash: ZERO_B256, - timestamp: 0u32, - nonce: 0u32, - emitter_chain_id: 0u16, - emitter_address: ZERO_B256, - sequence: 0u64, - consistency_level: 0u8, - payload: Bytes::new(), - } - } - pub fn new( - version: u8, - guardian_set_index: u32, - governance_action_hash: b256, - timestamp_: u32, - nonce: u32, - emitter_chain_id: u16, - emitter_address: b256, - sequence: u64, - consistency_level: u8, - payload: Bytes, - ) -> Self { - WormholeVM { - version, - guardian_set_index, - governance_action_hash, - timestamp: timestamp_, - nonce, - emitter_chain_id, - emitter_address, - sequence, - consistency_level, - payload, - } - } -} - -impl WormholeVM { - #[storage(read)] - pub fn parse_and_verify_wormhole_vm( - current_guardian_set_index: u32, - encoded_vm: Bytes, - wormhole_guardian_sets: StorageKey>, - ) -> Self { - let mut index = 0; - let version = encoded_vm.get(index); - require( - version - .is_some() && version - .unwrap() == 1, - WormholeError::VMVersionIncompatible, - ); - index += 1; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(4); //replace with slice() - let guardian_set_index = u32::from_be_bytes([ - //replace with func - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - ]); - index += 4; - let guardian_set = wormhole_guardian_sets.get(guardian_set_index).try_read(); - require(guardian_set.is_some(), WormholeError::GuardianSetNotFound); - let guardian_set = guardian_set.unwrap(); - require( - guardian_set - .keys - .len() > 0, - WormholeError::InvalidGuardianSetKeysLength, - ); - require( - guardian_set_index == current_guardian_set_index && (guardian_set - .expiration_time == 0 || guardian_set - .expiration_time > timestamp()), - WormholeError::InvalidGuardianSet, - ); - let signers_length = encoded_vm.get(index); - require( - signers_length - .is_some(), - WormholeError::SignersLengthIrretrievable, - ); - let signers_length = signers_length.unwrap().as_u64(); - index += 1; - // 66 is the length of each guardian signature - // 1 (guardianIndex) + 32 (r) + 32 (s) + 1 (v) - let hash_index = index + (signers_length * 66); - require( - hash_index < encoded_vm - .len(), - WormholeError::InvalidSignatureLength, - ); - let (_, slice) = encoded_vm.split_at(hash_index); - let hash = keccak256(keccak256(slice)); - let mut last_index = 0; - let mut i = 0; - while i < signers_length { - let guardian_index = encoded_vm.get(index); - require( - guardian_index - .is_some(), - WormholeError::GuardianIndexIrretrievable, - ); - let guardian_index = guardian_index.unwrap(); - index += 1; - let (_, slice) = encoded_vm.split_at(index); - let (slice, remainder) = slice.split_at(32); - let r: b256 = slice.into(); - index += 32; - let (slice, remainder) = remainder.split_at(32); - let s: b256 = slice.into(); - index += 32; - let v = remainder.get(0); - require(v.is_some(), WormholeError::SignatureVIrretrievable); - let v = v.unwrap() + 27; - index += 1; - let guardian_set_key = guardian_set.keys.get(guardian_index.as_u64()); - require( - guardian_set_key - .is_some(), - WormholeError::GuardianSetKeyIrretrievable, - ); - 
GuardianSignature::new(guardian_index, r, s, v) - .verify(guardian_set_key.unwrap().read(), hash, i, last_index); - last_index = guardian_index.as_u64(); - i += 1; - } - /* - We're using a fixed point number transformation with 1 decimal to deal with rounding. - This quorum check is critical to assessing whether we have enough Guardian signatures to validate a VM. - If guardian set key length is 0 and signatures length is 0, this could compromise the integrity of both VM and signature verification. - */ - require( - ((((guardian_set - .keys - .len() * 10) / 3) * 2) / 10 + 1) <= signers_length, - WormholeError::NoQuorum, - ); - //ignore VM.signatures - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(4); - let _timestamp = u32::from_be_bytes([ - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - ]); - index += 4; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(4); - let nonce = u32::from_be_bytes([ - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - ]); - index += 4; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(2); - let emitter_chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]); - index += 2; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(32); - let emitter_address: b256 = slice.into(); - index += 32; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(8); - let sequence = u64::from_be_bytes([ - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - slice.get(4).unwrap(), - slice.get(5).unwrap(), - slice.get(6).unwrap(), - slice.get(7).unwrap(), - ]); - index += 8; - let consistency_level = encoded_vm.get(index); - require( - consistency_level - .is_some(), - WormholeError::ConsistencyLevelIrretrievable, - ); - index += 1; - require( - index <= encoded_vm - .len(), - WormholeError::InvalidPayloadLength, - ); - let (_, payload) = encoded_vm.split_at(index); - WormholeVM::new( - version - .unwrap(), - guardian_set_index, - hash, - _timestamp, - nonce, - emitter_chain_id, - emitter_address, - sequence, - consistency_level - .unwrap(), - payload, - ) - } - pub fn parse_initial_wormhole_vm(encoded_vm: Bytes) -> Self { - let mut index = 0; - let version = encoded_vm.get(index); - require( - version - .is_some() && version - .unwrap() == 1, - WormholeError::VMVersionIncompatible, - ); - index += 1; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(4); //replace with slice() - let guardian_set_index = u32::from_be_bytes([ - //replace with func - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - ]); - index += 4; - let signers_length = encoded_vm.get(index); - require( - signers_length - .is_some(), - WormholeError::SignersLengthIrretrievable, - ); - let signers_length = signers_length.unwrap().as_u64(); - index += 1; - // 66 is the length of each guardian signature - // 1 (guardianIndex) + 32 (r) + 32 (s) + 1 (v) - let hash_index = index + (signers_length * 66); - require( - hash_index < encoded_vm - .len(), - WormholeError::InvalidSignatureLength, - ); - let (_, slice) = encoded_vm.split_at(hash_index); - let hash = keccak256(keccak256(slice)); - // account for signatures - index += 66 * signers_length; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(4); - 
let timestamp_ = u32::from_be_bytes([ - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - ]); - index += 4; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(4); - let nonce = u32::from_be_bytes([ - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - ]); - index += 4; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(2); - let emitter_chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]); - index += 2; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(32); - let emitter_address: b256 = slice.into(); - index += 32; - let (_, slice) = encoded_vm.split_at(index); - let (slice, _) = slice.split_at(8); - let sequence = u64::from_be_bytes([ - slice.get(0).unwrap(), - slice.get(1).unwrap(), - slice.get(2).unwrap(), - slice.get(3).unwrap(), - slice.get(4).unwrap(), - slice.get(5).unwrap(), - slice.get(6).unwrap(), - slice.get(7).unwrap(), - ]); - index += 8; - let consistency_level = encoded_vm.get(index); - require( - consistency_level - .is_some(), - WormholeError::ConsistencyLevelIrretrievable, - ); - index += 1; - require( - index <= encoded_vm - .len(), - WormholeError::InvalidPayloadLength, - ); - let (_, payload) = encoded_vm.split_at(index); - WormholeVM::new( - version - .unwrap(), - guardian_set_index, - hash, - timestamp_, - nonce, - emitter_chain_id, - emitter_address, - sequence, - consistency_level - .unwrap(), - payload, - ) - } -} - -impl WormholeVM { - #[storage(read)] - pub fn parse_and_verify_pyth_vm( - current_guardian_set_index: u32, - encoded_vm: Bytes, - wormhole_guardian_sets: StorageKey>, - is_valid_data_source: StorageKey>, - ) -> Self { - let vm = WormholeVM::parse_and_verify_wormhole_vm( - current_guardian_set_index, - encoded_vm, - wormhole_guardian_sets, - ); - require( - DataSource::new(vm.emitter_chain_id, vm.emitter_address) - .is_valid_data_source(is_valid_data_source), - WormholeError::InvalidUpdateDataSource, - ); - vm - } -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw deleted file mode 100644 index ab74947ae8..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/errors.sw +++ /dev/null @@ -1,72 +0,0 @@ -library; - -pub enum PythError { - FeesCanOnlyBePaidInTheBaseAsset: (), - FuturePriceNotAllowed: (), - GuardianSetNotFound: (), - IncorrectMessageType: (), - InsufficientFee: (), - InvalidArgument: (), - InvalidAttestationSize: (), - InvalidDataSourcesLength: (), - InvalidExponent: (), - InvalidGovernanceDataSource: (), - InvalidGovernanceAction: (), - InvalidGovernanceMessage: (), - InvalidGovernanceModule: (), - InvalidGovernanceTarget: (), - InvalidHeaderSize: (), - InvalidMagic: (), - InvalidMajorVersion: (), - InvalidMinorVersion: (), - InvalidPayloadId: (), - InvalidPayloadLength: (), - InvalidPriceFeedDataLength: (), - InvalidProof: (), - InvalidUpdateData: (), - InvalidUpdateDataLength: (), - InvalidUpdateDataSource: (), - InvalidUpgradeModule: (), - InvalidWormholeAddressToSet: (), - LengthOfPriceFeedIdsAndPublishTimesMustMatch: (), - NewGuardianSetIsEmpty: (), - NumberOfUpdatesIrretrievable: (), - OldGovernanceMessage: (), - /// Emitted when a Price's `publish_time` is stale. - OutdatedPrice: (), - /// Emitted when a PriceFeed could not be retrieved. 
- PriceFeedNotFound: (), - PriceFeedNotFoundWithinRange: (), - WormholeGovernanceActionNotFound: (), -} - -pub enum WormholeError { - ConsistencyLevelIrretrievable: (), - GovernanceActionAlreadyConsumed: (), - GuardianIndexIrretrievable: (), - GuardianSetHasExpired: (), - GuardianSetKeyIrretrievable: (), - GuardianSetKeysLengthNotEqual: (), - GuardianSetNotFound: (), - InvalidGovernanceAction: (), - InvalidGovernanceChain: (), - InvalidGovernanceContract: (), - InvalidGuardianSet: (), - InvalidGuardianSetKeysLength: (), - InvalidGuardianSetUpgrade: (), - InvalidGuardianSetUpgradeLength: (), - InvalidModule: (), - InvalidPayloadLength: (), - InvalidSignatureLength: (), - InvalidUpdateDataSource: (), - NewGuardianSetIsEmpty: (), - NewGuardianSetIndexIsInvalid: (), - NoQuorum: (), - NotSignedByCurrentGuardianSet: (), - SignatureInvalid: (), - SignatureIndicesNotAscending: (), - SignatureVIrretrievable: (), - SignersLengthIrretrievable: (), - VMSignatureInvalid: (), - VMVersionIncompatible: (), -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw deleted file mode 100644 index 4ccfecf2e4..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/events.sw +++ /dev/null @@ -1,43 +0,0 @@ -library; - -use ::data_structures::{data_source::DataSource, price::PriceFeedId,}; - -pub struct ConstructedEvent { - pub guardian_set_index: u32, -} - -pub struct NewGuardianSetEvent { - pub governance_action_hash: b256, - // new_guardian_set: GuardianSet, // TODO: Uncomment when SDK supports logs with nested Vecs https://github.com/FuelLabs/fuels-rs/issues/1046 - pub new_guardian_set_index: u32, -} - -pub struct UpdatedPriceFeedsEvent { - pub updated_price_feeds: Vec, -} - -pub struct ContractUpgradedEvent { - pub old_implementation: Identity, - pub new_implementation: Identity, -} - -pub struct GovernanceDataSourceSetEvent { - pub old_data_source: DataSource, - pub new_data_source: DataSource, - pub initial_sequence: u64, -} - -pub struct DataSourcesSetEvent { - pub old_data_sources: Vec, - pub new_data_sources: Vec, -} - -pub struct FeeSetEvent { - pub old_fee: u64, - pub new_fee: u64, -} - -pub struct ValidPeriodSetEvent { - pub old_valid_period: u64, - pub new_valid_period: u64, -} diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw deleted file mode 100644 index 43ac8c334f..0000000000 --- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/interface.sw +++ /dev/null @@ -1,323 +0,0 @@ -library; - -// The order of the modules is important because of the dependencies between them. -pub mod pyth_merkle_proof; -pub mod errors; -pub mod utils; -pub mod events; -pub mod data_structures; - -use ::data_structures::{ - data_source::DataSource, - governance_payload::UpgradeContractPayload, - price::{ - Price, - PriceFeed, - PriceFeedId, - }, - wormhole_light::{ - GuardianSet, - }, -}; -use std::{bytes::Bytes, storage::storage_vec::*}; - -abi PythCore { - /// This function returns the exponentially-weighted moving average price and confidence interval. - /// - /// # Arguments - /// - /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. 
- /// - /// # Returns - /// - /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. - /// - /// # Reverts - /// - /// * When the EMA price is not available. - #[storage(read)] - fn ema_price(price_feed_id: PriceFeedId) -> Price; - - /// This function Returns the exponentially-weighted moving average price that is no older than `time` seconds - /// from the current time. - /// - /// # Additional Information - /// - /// This function is a sanity-checked version of `ema_price_unsafe` which is useful in - /// applications that require a sufficiently-recent price. - /// - /// # Arguments - /// - /// * `time_period`: [u64] - The period (in seconds) that a price feed is considered valid since its publish time. - /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. - /// - /// # Returns - /// - /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. - /// - /// # Reverts - /// - /// * When the EMA price is not available. - /// * When the EMA price wasn't updated recently enough. - #[storage(read)] - fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price; - - /// This function returns the exponentially-weighted moving average price of a price feed without any sanity checks. - /// - /// # Additional Information - /// - /// This function returns the same price as `ema_price` in the case where the price is available. - /// However, if the price is not recent this function returns the latest available price. - /// - /// The returned price can be from arbitrarily far in the past; this function makes no guarantees that - /// the returned price is recent or useful for any particular application. - /// - /// Users of this function should check the `publish_time` in the `Price` to ensure that the returned price is - /// sufficiently recent for their application. If you are considering using this function, it may be - /// safer / easier to use either `ema_price` or `ema_price_no_older_than`. - /// - /// # Arguments - /// - /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. - /// - /// # Returns - /// - /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. - #[storage(read)] - fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price; - - /// This function parses `update_data` and returns price feeds of the given `price_feed_ids` if they are all published - /// within `min_publish_time` and `max_publish_time`. - /// - /// # Additional Information - /// - /// You can use this method if you want to use a Pyth price at a fixed time and not the most recent price; - /// otherwise, please consider using `update_price_feeds`. This method does not store the price updates on-chain. - /// - /// This method requires the caller to pay a fee in wei; the required fee can be computed by calling - /// `update_fee`. - /// - /// # Arguments - /// - /// * `max_publish_time`: [u64] - The maximum acceptable `publish_time` for the given `price_feed_ids`. - /// * `min_publish_time`: [u64] - The minimum acceptable `publish_time` for the given `price_feed_ids`. - /// * `price_feed_ids`: [Vec] - The ids of the price feeds to return PriceFeed data for. - /// * `update_data`: [Bytes] - The price update data. - /// - /// # Returns - /// - /// * [u64] - The number of hashes performed. 
- /// - /// # Reverts - /// - /// * When the transferred fee is not sufficient - /// * When the update_data is invalid - /// * When there is no update for any of the given `priceIds` within the given time range. - #[storage(read), payable] - fn parse_price_feed_updates( - max_publish_time: u64, - min_publish_time: u64, - price_feed_ids: Vec, - update_data: Vec, - ) -> Vec; - - /// This function returns the price and confidence interval. - /// - /// # Additional Information - /// - /// This function also has some complex behaviours. - /// - /// # Arguments - /// - /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. - /// - /// # Returns - /// - /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. - /// - /// # Reverts - /// - /// * When the price has not been updated within the last valid time period. - #[storage(read)] - fn price(price_feed_id: PriceFeedId) -> Price; - - /// This function returns the price that is no older than `time` seconds of the current time. - /// - /// # Additional Information - /// - /// This function is a sanity-checked version of `price_unsafe` which is useful in applications that require a - /// sufficiently-recent price. Reverts if the price wasn't updated sufficiently recently. - /// - /// # Arguments - /// - /// * `time_period`: [u64] - The period (in seconds) that a price feed is considered valid since its publish time. - /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. - /// - /// # Returns - /// - /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. - /// - /// # Reverts - /// - /// * When the price is not available. - /// * When the price wasn't updated recently enough. - #[storage(read)] - fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price; - - /// This function returns the price of a price feed without any sanity checks. - /// - /// # Additional Information - /// - /// This function returns the most recent price update in this contract without any recency checks. - /// This function is unsafe as the returned price update may be arbitrarily far in the past. - /// - /// Users of this function should check the `publish_time` in the price to ensure that the returned price is - /// sufficiently recent for their application. If you are considering using this function, it may be - /// safer / easier to use either `getPrice` or `price_no_older_than`. - /// - /// # Arguments - /// - /// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval. - /// - /// # Returns - /// - /// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely. - #[storage(read)] - fn price_unsafe(price_feed_id: PriceFeedId) -> Price; - - /// This function returns the required fee in Wei to update an array of price updates. - /// - /// # Arguments - /// - /// * `update_data`: [Bytes] - The price update data. - /// - /// # Returns - /// - /// * [u64] - The required fee in Wei. - #[storage(read)] - fn update_fee(update_data: Vec) -> u64; - - /// This function updates price feeds with the given update messages. - /// - /// # Additional Information - /// - /// This function requires the caller to pay a fee in wei; the required fee can be computed by calling - /// `update_fee`. 
- /// Prices will be updated if they are more recent than the current stored prices. - /// The call will succeed even if the update is not the most recent. - /// - /// # Arguments - /// - /// * `update_data`: [Bytes] - The price update data. - /// - /// # Reverts - /// - /// * When the transferred fee is not sufficient. - /// * When the `update_data` is invalid. - #[storage(read, write), payable] - fn update_price_feeds(update_data: Vec); - - /// This function is a wrapper around `update_price_feeds` that reverts fast if a price update is not necessary. - /// - /// # Additional Information - /// - /// A price update is necessary if the current on-chain `publish_time` is older than the given `publish_time`. It relies solely on the - /// given `publish_time` for the price feeds and does not read the actual price update publish time within `update_data`. - /// - /// This method requires the caller to pay a fee in wei; the required fee can be computed by calling - /// `update_fee`. - /// - /// `price_feed_ids` and `publish_times` are two arrays with the same size that correspond to senders known `publish_time` - /// of each PriceFeedId when calling this method. If all of price feeds within `price_feed_ids` have updated and have - /// a newer or equal publish time than the given publish time, it will reject the transaction to save gas. - /// Otherwise, it calls `update_price_feeds` to update the prices. - /// - /// # Arguments - /// - /// * `price_feed_ids`: [Vec] - Vector of price feed ids; `price_feed_ids[i]` corresponds to known price feed id of `publish_times[i]`. - /// * `publish_times`: [Vec] - Vector of publish times; `publish_times[i]` corresponds to known publish time of `price_feed_ids[i]`. - /// * `update_data`: [Bytes] - The price update data. - /// - /// - /// # Reverts - /// - /// * When update is not necessary. - /// * When the transferred fee is not sufficient. - /// * When the `update_data` is invalid. - #[storage(read, write), payable] - fn update_price_feeds_if_necessary( - price_feed_ids: Vec, - publish_times: Vec, - update_data: Vec, - ); - - /// This function returns the period (in seconds) that a price feed is considered valid since its publish time. - /// - /// # Returns - /// - /// * [u64] - The period (in seconds) that a price feed is considered valid since its publish time. - #[storage(read)] - fn valid_time_period() -> u64; -} - -abi PythInit { - #[storage(read, write)] - fn constructor( - data_sources: Vec, - governance_data_source: DataSource, - wormhole_governance_data_source: DataSource, - single_update_fee: u64, - valid_time_period_seconds: u64, - wormhole_guardian_set_addresses: Vec, - wormhole_guardian_set_index: u32, - chain_id: u16, - ); -} - -abi PythInfo { - #[storage(read)] - fn latest_publish_time(price_feed_id: PriceFeedId) -> u64; - - /// @notice Returns true if a price feed with the given id exists. - /// @param price_feed_id The Pyth Price Feed ID of which to check its existence. - #[storage(read)] - fn price_feed_exists(price_feed_id: PriceFeedId) -> bool; - - /// @notice Returns the price feed with given id. - /// @dev Reverts if the price does not exist. - /// @param price_feed_id The Pyth Price Feed ID of which to fetch the PriceFeed. 
-    #[storage(read)]
-    fn price_feed_unsafe(price_feed_id: PriceFeedId) -> PriceFeed;
-
-    #[storage(read)]
-    fn single_update_fee() -> u64;
-
-    #[storage(read)]
-    fn is_valid_data_source(data_source: DataSource) -> bool;
-
-    #[storage(read)]
-    fn valid_data_sources() -> Vec<DataSource>;
-
-    #[storage(read)]
-    fn last_executed_governance_sequence() -> u64;
-
-    #[storage(read)]
-    fn chain_id() -> u16;
-}
-
-abi WormholeGuardians {
-    #[storage(read)]
-    fn current_guardian_set_index() -> u32;
-
-    #[storage(read)]
-    fn current_wormhole_provider() -> DataSource;
-
-    #[storage(read)]
-    fn governance_action_is_consumed(hash: b256) -> bool;
-
-    #[storage(read)]
-    fn guardian_set(index: u32) -> GuardianSet;
-
-    #[storage(read, write)]
-    fn submit_new_guardian_set(vm: Bytes);
-}
diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw
deleted file mode 100644
index a26f91fcb0..0000000000
--- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/pyth_merkle_proof.sw
+++ /dev/null
@@ -1,63 +0,0 @@
-library;
-
-use std::{bytes::Bytes, hash::{Hash, keccak256}};
-use ::errors::PythError;
-
-pub const MERKLE_LEAF_PREFIX = 0u8;
-pub const MERKLE_NODE_PREFIX = 1u8;
-
-fn leaf_hash(data: Bytes) -> Bytes {
-    let mut bytes = Bytes::new();
-    bytes.push(MERKLE_LEAF_PREFIX);
-    bytes.append(data);
-
-    let (slice, _) = Bytes::from(keccak256(bytes)).split_at(20);
-
-    slice
-}
-
-fn node_hash(child_a: Bytes, child_b: Bytes) -> Bytes {
-    let mut bytes = Bytes::with_capacity(41);
-    bytes.push(MERKLE_NODE_PREFIX);
-
-    let a: b256 = child_a.into();
-    let b: b256 = child_b.into();
-    if a > b {
-        bytes.append(child_b);
-        bytes.append(child_a);
-    } else {
-        bytes.append(child_a);
-        bytes.append(child_b);
-    }
-
-    let (slice, _) = Bytes::from(keccak256(bytes)).split_at(20);
-
-    slice
-}
-
-pub fn validate_proof(
-    encoded_proof: Bytes,
-    ref mut proof_offset: u64,
-    root: Bytes,
-    leaf_data: Bytes,
-) -> u64 {
-    let mut current_digest = leaf_hash(leaf_data);
-    let proof_size = encoded_proof.get(proof_offset).unwrap().as_u64();
-    proof_offset += 1;
-
-    let mut i = 0;
-    while i < proof_size {
-        let (_, slice) = encoded_proof.split_at(proof_offset);
-        let (sibling_digest, _) = slice.split_at(20);
-        proof_offset += 20;
-        current_digest = node_hash(current_digest, sibling_digest);
-        i += 1;
-    }
-
-    let current_digest_b256: b256 = current_digest.into();
-    let root_b256: b256 = root.into();
-
-    require(current_digest_b256 == root_b256, PythError::InvalidProof);
-
-    proof_offset
-}
diff --git a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw b/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw
deleted file mode 100644
index 28ef0b430b..0000000000
--- a/internal/benchmarks/test/fixtures/forc-projects/pyth-interface/src/utils.sw
+++ /dev/null
@@ -1,17 +0,0 @@
-library;
-
-pub fn absolute_of_exponent(exponent: u32) -> u32 {
-    if exponent == 0u32 {
-        exponent
-    } else {
-        u32::max() - exponent + 1
-    }
-}
-
-#[storage(read)]
-pub fn total_fee(
-    total_number_of_updates: u64,
-    single_update_fee: StorageKey<u64>,
-) -> u64 {
-    total_number_of_updates * single_update_fee.read()
-}

From 2b3047c855be5bd94ecbaa47c79f635fe93d39ea Mon Sep 17 00:00:00 2001
From: chad
Date: Thu, 19 Sep 2024 10:39:10 -0500
Subject: [PATCH 19/21] chore: update to large contract factory

---
 internal/benchmarks/src/contract-interaction.bench.ts | 4 ++--
 1 file changed, 2 insertions(+), 2
deletions(-) diff --git a/internal/benchmarks/src/contract-interaction.bench.ts b/internal/benchmarks/src/contract-interaction.bench.ts index 27d3f796a8..add8eb8425 100644 --- a/internal/benchmarks/src/contract-interaction.bench.ts +++ b/internal/benchmarks/src/contract-interaction.bench.ts @@ -9,7 +9,7 @@ import type { CounterContract, CallTestContract } from '../test/typegen/contract import { CounterContractFactory, CallTestContractFactory, - PythContractFactory, + LargeContractFactory, } from '../test/typegen/contracts'; import { isDevnet, runBenchmark } from './config'; @@ -94,7 +94,7 @@ describe('Contract Interaction Benchmarks', () => { }); bench('should successfully execute a contract deploy as blobs', async () => { - const factory = new PythContractFactory(wallet); + const factory = new LargeContractFactory(wallet); const { waitForResult } = await factory.deployAsBlobTx({ chunkSizeMultiplier: 0.9, }); From af76ec69772201f89e6efbdab2e7ef95aa6c2364 Mon Sep 17 00:00:00 2001 From: chad Date: Thu, 19 Sep 2024 10:47:02 -0500 Subject: [PATCH 20/21] lint: forc formatting --- internal/benchmarks/test/fixtures/forc-projects/Forc.toml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/internal/benchmarks/test/fixtures/forc-projects/Forc.toml b/internal/benchmarks/test/fixtures/forc-projects/Forc.toml index a88486f141..36733197e5 100644 --- a/internal/benchmarks/test/fixtures/forc-projects/Forc.toml +++ b/internal/benchmarks/test/fixtures/forc-projects/Forc.toml @@ -1,6 +1,2 @@ [workspace] -members = [ - "call-test-contract", - "counter-contract", - "large-contract", -] +members = ["call-test-contract", "counter-contract", "large-contract"] From be9151dc37f269f5018e55b43ac736b8cb623479 Mon Sep 17 00:00:00 2001 From: chad Date: Fri, 27 Sep 2024 12:35:15 -0500 Subject: [PATCH 21/21] chore: remove private key --- internal/benchmarks/src/cost-estimation.bench.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/internal/benchmarks/src/cost-estimation.bench.ts b/internal/benchmarks/src/cost-estimation.bench.ts index dd50683c84..cffa9ba735 100644 --- a/internal/benchmarks/src/cost-estimation.bench.ts +++ b/internal/benchmarks/src/cost-estimation.bench.ts @@ -47,10 +47,7 @@ describe('Cost Estimation Benchmarks', () => { receiver1 = Wallet.generate({ provider }); receiver2 = Wallet.generate({ provider }); receiver3 = Wallet.generate({ provider }); - sender = Wallet.fromPrivateKey( - '0x30bb0bc68f5d2ec3b523cee5a65503031b40679d9c72280cd8088c2cfbc34e38', - provider - ); + sender = Wallet.generate({ provider }); }; beforeAll(setupTestEnvironment);
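
Note (not part of the patch series): the benchmarks above import `isDevnet` and `runBenchmark` from './config', but their implementations are not included in these patches. The TypeScript sketch below shows one minimal way those helpers could look, assuming the devnet path is gated on the same DEVNET_WALLET_PVT_KEY secret the workflow passes in; treat the exact shapes as assumptions rather than the committed code.

import { bench } from 'vitest';

// Assumed helper: the devnet path is taken only when the funded wallet key
// (provided via the DEVNET_WALLET_PVT_KEY secret) is present in the environment.
export const isDevnet = (): boolean => process.env.DEVNET_WALLET_PVT_KEY !== undefined;

// Assumed helper: registers a vitest benchmark so each *.bench.ts file can share
// one code path, while making it visible in reports whether it ran against devnet.
export const runBenchmark = (name: string, fn: () => Promise<void>): void => {
  bench(isDevnet() ? `${name} (devnet)` : name, fn);
};

A benchmark file would then call runBenchmark('should successfully transfer', async () => { ... }) in place of a bare bench(...), letting the local launchTestNode setup and the devnet setup share the same benchmark bodies.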