From 894660973cfd06faae39f74c66a39616f5b2239f Mon Sep 17 00:00:00 2001 From: Gabriel Rocheleau Date: Tue, 18 Jul 2023 04:56:04 -0400 Subject: [PATCH] client: migrate tests to vite (#2797) * client: migrate tests to vite * client: update test scripts * client: fix rpc engine tests * client: fix rpc engine tests * client: remove unnecessary stringification * client: fix more rpc tests * client: misc test fixes * client: fix ci script still using tape * Rename libp2p tests to avoid vitest running them * fix instanceof tests * Update vitest.config so that test:CLI runs * Update vitest.config so that test:CLI runs * Fixes for client.spec.ts * Fix error in sender.spec.ts * Fix test formatting rlpxserver.spec.ts * Update unit test config, randomize rpc port * test fixes * more test fixes * Fix engine tests * Partial test fixes * Fix merge integration test * fix fcu hex handling * Add timeouts and fix lightsync * Various test and type fixes * fix txpool tests * correct bytes2hex import * Fix lightethereumservice tests * client: fix lesprotocol test * Fix most full ethereum service tests * Fix fullethereumservice test * "Fix" flow control test * Fix rlxppeer test * client: fix lightsync integration test timeouts * client: update client ci * client: increase timeout for some tests * client: remove only from flowcontrol test * client: more test fixes * client: increase timeout for miner * client: increase timeout for miner * client: increase more timeouts and fix missing it statement * fix integration tests * fix lint rules * fix npm script * Fix lint file extension * Fix lint config, again * File path fix --------- Co-authored-by: acolytec3 <17355484+acolytec3@users.noreply.github.com> Co-authored-by: ScottyPoi Co-authored-by: Scotty <66335769+ScottyPoi@users.noreply.github.com> Co-authored-by: Holger Drewes --- .github/workflows/client-build.yml | 2 +- config/eslint.cjs | 1 + .../cli-libp2p.spec.ts | 25 +- .../{libp2psender.ts => libp2psender.ts.old} | 0 
.../net/test/libp2pnode.spec.ts | 19 - .../net/test/libp2pnode.spec.ts.old | 17 + ...p2ppeer.spec.ts => libp2ppeer.spec.ts.old} | 35 +- ...ender.spec.ts => libp2psender.spec.ts.old} | 21 +- ...erver.spec.ts => libp2pserver.spec.ts.old} | 59 +- packages/client/package.json | 7 +- packages/client/src/rpc/modules/engine.ts | 2 +- packages/client/src/sync/beaconsync.ts | 2 +- packages/client/src/util/rpc.ts | 6 +- packages/client/test/blockchain/chain.spec.ts | 75 +- packages/client/test/cli/cli-rpc.spec.ts | 43 +- packages/client/test/cli/cli-sync.spec.ts | 97 +- packages/client/test/client.spec.ts | 75 +- packages/client/test/config.spec.ts | 57 +- .../client/test/execution/vmexecution.spec.ts | 180 +-- .../test/integration/beaconsync.spec.ts | 29 +- packages/client/test/integration/cli.spec.ts | 21 +- .../client/test/integration/client.spec.ts | 41 +- .../integration/fullethereumservice.spec.ts | 115 +- .../client/test/integration/fullsync.spec.ts | 24 +- .../integration/lightethereumservice.spec.ts | 12 - .../client/test/integration/lightsync.spec.ts | 152 +-- .../client/test/integration/merge.spec.ts | 105 +- .../client/test/integration/miner.spec.ts | 36 +- .../client/test/integration/mocks/mockpeer.ts | 2 +- .../client/test/integration/peerpool.spec.ts | 34 +- packages/client/test/integration/pow.spec.ts | 19 +- packages/client/test/logging.spec.ts | 27 +- packages/client/test/miner/miner.spec.ts | 541 ++++----- .../client/test/miner/pendingBlock.spec.ts | 168 +-- packages/client/test/net/peer/peer.spec.ts | 34 +- .../client/test/net/peer/rlpxpeer.spec.ts | 103 +- packages/client/test/net/peerpool.spec.ts | 56 +- .../test/net/protocol/boundprotocol.spec.ts | 50 +- .../test/net/protocol/ethprotocol.spec.ts | 91 +- .../test/net/protocol/flowcontrol.spec.ts | 29 +- .../test/net/protocol/lesprotocol.spec.ts | 27 +- .../client/test/net/protocol/protocol.spec.ts | 56 +- .../test/net/protocol/rlpxsender.spec.ts | 35 +- .../client/test/net/protocol/sender.spec.ts | 20 
+- .../test/net/protocol/snapprotocol.spec.ts | 213 ++-- .../client/test/net/server/rlpxserver.spec.ts | 108 +- .../client/test/rpc/admin/nodeInfo.spec.ts | 28 +- .../test/rpc/debug/traceTransaction.spec.ts | 299 ++--- .../rpc/engine/exchangeCapabilities.spec.ts | 28 +- .../exchangeTransitionConfigurationV1.spec.ts | 32 +- .../rpc/engine/forkchoiceUpdatedV1.spec.ts | 641 +++++----- .../engine/getPayloadBodiesByHashV1.spec.ts | 351 +++--- .../engine/getPayloadBodiesByRangeV1.spec.ts | 363 +++--- .../test/rpc/engine/getPayloadV1.spec.ts | 95 +- .../test/rpc/engine/getPayloadV3.spec.ts | 176 +-- .../test/rpc/engine/newPayloadV1.spec.ts | 458 ++++---- .../test/rpc/engine/newPayloadV2.spec.ts | 167 ++- .../test/rpc/engine/newPayloadV3.spec.ts | 176 ++- .../newPayloadV3VersionedHashes.spec.ts | 49 +- .../test/rpc/engine/withdrawals.spec.ts | 41 +- .../client/test/rpc/eth/blockNumber.spec.ts | 38 +- packages/client/test/rpc/eth/call.spec.ts | 259 ++--- packages/client/test/rpc/eth/chainId.spec.ts | 79 +- .../client/test/rpc/eth/estimateGas.spec.ts | 325 +++--- packages/client/test/rpc/eth/gasPrice.spec.ts | 342 +++--- .../client/test/rpc/eth/getBalance.spec.ts | 189 +-- .../test/rpc/eth/getBlockByHash.spec.ts | 122 +- .../test/rpc/eth/getBlockByNumber.spec.ts | 229 ++-- .../getBlockTransactionCountByHash.spec.ts | 92 +- .../getBlockTransactionCountByNumber.spec.ts | 269 +++-- packages/client/test/rpc/eth/getCode.spec.ts | 195 ++-- packages/client/test/rpc/eth/getLogs.spec.ts | 450 +++---- packages/client/test/rpc/eth/getProof.spec.ts | 166 +-- .../client/test/rpc/eth/getStorageAt.spec.ts | 134 ++- .../getTransactionByBlockHashAndIndex.spec.ts | 128 +- .../test/rpc/eth/getTransactionByHash.spec.ts | 144 +-- .../test/rpc/eth/getTransactionCount.spec.ts | 159 +-- .../rpc/eth/getTransactionReceipt.spec.ts | 204 ++-- .../eth/getUncleCountByBlockNumber.spec.ts | 38 +- .../test/rpc/eth/protocolVersion.spec.ts | 22 +- .../test/rpc/eth/sendRawTransaction.spec.ts | 492 ++++---- 
packages/client/test/rpc/eth/syncing.spec.ts | 165 +-- packages/client/test/rpc/helpers.ts | 9 +- .../client/test/rpc/net/listening.spec.ts | 54 +- .../client/test/rpc/net/peerCount.spec.ts | 24 +- packages/client/test/rpc/net/version.spec.ts | 88 +- packages/client/test/rpc/rpc.spec.ts | 384 +++--- .../client/test/rpc/txpool/content.spec.ts | 135 ++- packages/client/test/rpc/util.ts | 6 +- .../test/rpc/util/CLConnectionManager.spec.ts | 28 +- packages/client/test/rpc/validation.spec.ts | 1030 +++++++++-------- .../test/rpc/web3/clientVersion.spec.ts | 52 +- packages/client/test/rpc/web3/sha3.spec.ts | 82 +- packages/client/test/rpc/websocket.spec.ts | 131 +-- .../test/service/fullethereumservice.spec.ts | 363 +++--- .../test/service/lightethereumservice.spec.ts | 97 +- packages/client/test/sim/4844devnet5.spec.ts | 29 +- packages/client/test/sim/eof.spec.ts | 59 +- packages/client/test/sim/mainnet.spec.ts | 31 +- packages/client/test/sim/sharding.spec.ts | 38 +- packages/client/test/sim/snapsync.spec.ts | 113 +- packages/client/test/sync/beaconsync.spec.ts | 250 ++-- .../test/sync/fetcher/accountfetcher.spec.ts | 63 +- .../test/sync/fetcher/blockfetcher.spec.ts | 81 +- .../test/sync/fetcher/bytecodefetcher.spec.ts | 68 +- .../client/test/sync/fetcher/fetcher.spec.ts | 51 +- .../test/sync/fetcher/headerfetcher.spec.ts | 45 +- .../sync/fetcher/reverseblockfetcher.spec.ts | 97 +- .../test/sync/fetcher/storagefetcher.spec.ts | 80 +- packages/client/test/sync/fullsync.spec.ts | 60 +- packages/client/test/sync/lightsync.spec.ts | 54 +- packages/client/test/sync/skeleton.spec.ts | 733 ++++++------ packages/client/test/sync/snapsync.spec.ts | 20 +- packages/client/test/sync/sync.spec.ts | 17 +- packages/client/test/sync/txpool.spec.ts | 351 +++--- packages/client/test/util/parse.spec.ts | 30 +- packages/client/test/util/rpc.spec.ts | 15 +- packages/client/vitest.config.unit.ts | 9 + 118 files changed, 7436 insertions(+), 7357 deletions(-) rename packages/client/{test/cli 
=> libp2pBrowserBuild}/cli-libp2p.spec.ts (80%) rename packages/client/libp2pBrowserBuild/net/{libp2psender.ts => libp2psender.ts.old} (100%) delete mode 100644 packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts create mode 100644 packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts.old rename packages/client/libp2pBrowserBuild/net/test/{libp2ppeer.spec.ts => libp2ppeer.spec.ts.old} (75%) rename packages/client/libp2pBrowserBuild/net/test/{libp2psender.spec.ts => libp2psender.spec.ts.old} (59%) rename packages/client/libp2pBrowserBuild/net/test/{libp2pserver.spec.ts => libp2pserver.spec.ts.old} (74%) delete mode 100644 packages/client/test/integration/lightethereumservice.spec.ts create mode 100644 packages/client/vitest.config.unit.ts diff --git a/.github/workflows/client-build.yml b/.github/workflows/client-build.yml index 38a70f84d2..bc9765c1ae 100644 --- a/.github/workflows/client-build.yml +++ b/.github/workflows/client-build.yml @@ -95,4 +95,4 @@ jobs: with: timeout_minutes: 10 max_attempts: 3 - command: cd ${{github.workspace}}/packages/client && npm run tape -- 'test/integration/cli.spec.ts' + command: cd ${{github.workspace}}/packages/client && npx vitest run test/integration/cli.spec.ts diff --git a/config/eslint.cjs b/config/eslint.cjs index 5999d762a4..1462faabaf 100644 --- a/config/eslint.cjs +++ b/config/eslint.cjs @@ -28,6 +28,7 @@ module.exports = { 'webpack.config.js', 'vitest.config.ts', 'vitest.config.browser.ts', + 'vitest.config.unit.ts' ], extends: [ 'typestrict', diff --git a/packages/client/test/cli/cli-libp2p.spec.ts b/packages/client/libp2pBrowserBuild/cli-libp2p.spec.ts similarity index 80% rename from packages/client/test/cli/cli-libp2p.spec.ts rename to packages/client/libp2pBrowserBuild/cli-libp2p.spec.ts index a7e0f2d2f2..57ca7d08e1 100644 --- a/packages/client/test/cli/cli-libp2p.spec.ts +++ b/packages/client/libp2pBrowserBuild/cli-libp2p.spec.ts @@ -1,20 +1,19 @@ import { spawn } from 'child_process' -import * 
as tape from 'tape' +import { assert, describe, it } from 'vitest' import type { ChildProcessWithoutNullStreams } from 'child_process' -const end = (child: ChildProcessWithoutNullStreams, hasEnded: boolean, st: tape.Test) => { +const end = (child: ChildProcessWithoutNullStreams, hasEnded: boolean) => { if (hasEnded) return hasEnded = true child.stdout.removeAllListeners() child.stderr.removeAllListeners() const res = child.kill('SIGINT') - st.ok(res, 'client shut down successfully') - st.end() + assert.ok(res, 'client shut down successfully') } -tape('[CLI] rpc', (t) => { - t.test('libp2p should start up', (st) => { +describe('[CLI] rpc', () => { + it('libp2p should start up', () => { const file = require.resolve('../../dist/bin/cli.js') const child = spawn(process.execPath, [ file, @@ -32,7 +31,7 @@ tape('[CLI] rpc', (t) => { const message: string = data.toString() if (message.includes('transport=libp2p')) { - st.pass('libp2p server started') + assert.ok(true, 'libp2p server started') const bootnodeAddressArray = message.split(' ') const bootnodeAddressIndex = bootnodeAddressArray.findIndex((chunk: string) => chunk.startsWith('url=') @@ -54,10 +53,10 @@ tape('[CLI] rpc', (t) => { child2.stdout.on('data', async (data) => { const message: string = data.toString() if (message.includes('Peer added')) { - st.pass('connected to peer over libp2p') + assert.ok(true, 'connected to peer over libp2p') child2.kill('SIGINT') child2.stdout.removeAllListeners() - end(child, false, st) + end(child, false) } }) } @@ -65,14 +64,14 @@ tape('[CLI] rpc', (t) => { child.stderr.on('data', (data) => { const message: string = data.toString() - st.fail(`stderr: ${message}`) - end(child, hasEnded, st) + assert.fail(`stderr: ${message}`) + end(child, hasEnded) }) child.on('close', (code) => { if (typeof code === 'number' && code > 0) { - st.fail(`child process exited with code ${code}`) - end(child, hasEnded, st) + assert.fail(`child process exited with code ${code}`) + end(child, hasEnded) 
} }) }) diff --git a/packages/client/libp2pBrowserBuild/net/libp2psender.ts b/packages/client/libp2pBrowserBuild/net/libp2psender.ts.old similarity index 100% rename from packages/client/libp2pBrowserBuild/net/libp2psender.ts rename to packages/client/libp2pBrowserBuild/net/libp2psender.ts.old diff --git a/packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts b/packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts deleted file mode 100644 index 2eed86d5fa..0000000000 --- a/packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts +++ /dev/null @@ -1,19 +0,0 @@ -import * as tape from 'tape' -import * as td from 'testdouble' - -tape('[Libp2pNode]', async (t) => { - td.replace('libp2p') - const { Libp2pNode } = await import('../peer/libp2pnode') - - t.test('should be a libp2p bundle', (t) => { - const peerId = td.object('PeerId') as any - const node = new Libp2pNode({ peerId }) - t.equals(node.constructor.name, Libp2pNode.name, 'is libp2p bundle') - t.end() - }) - - t.test('should reset td', (t) => { - td.reset() - t.end() - }) -}) diff --git a/packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts.old b/packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts.old new file mode 100644 index 0000000000..032e48c3cc --- /dev/null +++ b/packages/client/libp2pBrowserBuild/net/test/libp2pnode.spec.ts.old @@ -0,0 +1,17 @@ +import { assert, describe, it } from 'vitest' +import * as td from 'testdouble' + +describe('[Libp2pNode]', async () => { + td.replace('libp2p') + const { Libp2pNode } = await import('../peer/libp2pnode') + + it('should be a libp2p bundle', () => { + const peerId = td.object('PeerId') as any + const node = new Libp2pNode({ peerId }) + assert.equal(node.constructor.name, Libp2pNode.name, 'is libp2p bundle') + }) + + it('should reset td', () => { + td.reset() + }) +}) diff --git a/packages/client/libp2pBrowserBuild/net/test/libp2ppeer.spec.ts b/packages/client/libp2pBrowserBuild/net/test/libp2ppeer.spec.ts.old similarity 
index 75% rename from packages/client/libp2pBrowserBuild/net/test/libp2ppeer.spec.ts rename to packages/client/libp2pBrowserBuild/net/test/libp2ppeer.spec.ts.old index dd7b33d3fa..9105b8be8a 100644 --- a/packages/client/libp2pBrowserBuild/net/test/libp2ppeer.spec.ts +++ b/packages/client/libp2pBrowserBuild/net/test/libp2ppeer.spec.ts.old @@ -1,5 +1,5 @@ import { multiaddr } from 'multiaddr' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import * as td from 'testdouble' import { Config } from '../../../src/config' @@ -8,7 +8,7 @@ import { Event } from '../../../src/types' import type { Libp2pPeer } from '../../../src/net/peer' import type { Protocol } from '../../../src/net/protocol' -tape('[Libp2pPeer]', async (t) => { +describe('[Libp2pPeer]', async () => { td.replace('peer-id') const Libp2pNode = td.constructor(['start', 'stop', 'dial', 'dialProtocol'] as any) @@ -20,39 +20,40 @@ tape('[Libp2pPeer]', async (t) => { const { Libp2pPeer } = await import('../peer/libp2ppeer') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config() const multiaddrs = [ multiaddr('/ip4/192.0.2.1/tcp/12345'), multiaddr('/ip4/192.0.2.1/tcp/23456'), ] const peer = new Libp2pPeer({ config, multiaddrs }) - t.equals(peer.address, '/ip4/192.0.2.1/tcp/12345,/ip4/192.0.2.1/tcp/23456', 'address correct') - t.end() + assert.equal( + peer.address, + '/ip4/192.0.2.1/tcp/12345,/ip4/192.0.2.1/tcp/23456', + 'address correct' + ) }) - t.test('should connect to peer', async (t) => { + it('should connect to peer', async () => { const config = new Config() const peer = new Libp2pPeer({ config }) config.events.on(Event.PEER_CONNECTED, (peer) => { - t.equals((peer as Libp2pPeer).address, '/ip4/0.0.0.0/tcp/0', 'connected') - t.end() + assert.equal((peer as Libp2pPeer).address, '/ip4/0.0.0.0/tcp/0', 'connected') }) await peer.connect() }) - t.test('should accept peer connection', async (t) => { + 
it('should accept peer connection', async () => { const config = new Config() const peer: any = new Libp2pPeer({ config }) peer.bindProtocol = td.func() td.when(peer.bindProtocol('proto' as any, 'conn' as any)).thenResolve(null) await peer.accept('proto', 'conn', 'server') - t.equals(peer.server, 'server', 'server set') - t.ok(peer.inbound, 'inbound set to true') - t.end() + assert.equal(peer.server, 'server', 'server set') + assert.ok(peer.inbound, 'inbound set to true') }) - t.test('should bind protocols', async (t) => { + it('should bind protocols', async () => { const config = new Config() const protocol = { name: 'proto', versions: [1], open: () => {} } as Protocol const badProto = { name: 'bad', versions: [1], open: () => {} } as Protocol @@ -66,13 +67,11 @@ tape('[Libp2pPeer]', async (t) => { td.when(node.dialProtocol(td.matchers.anything(), '/proto/1')).thenResolve(null) td.when(node.dialProtocol(td.matchers.anything(), '/bad/1')).thenReject(new Error('bad')) await peer.bindProtocols(node, td.matchers.anything(), 'server') - t.equals(peer.server, 'server', 'server set') - t.ok((peer as any).connected, 'connected set to true') - t.end() + assert.equal(peer.server, 'server', 'server set') + assert.ok((peer as any).connected, 'connected set to true') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/libp2pBrowserBuild/net/test/libp2psender.spec.ts b/packages/client/libp2pBrowserBuild/net/test/libp2psender.spec.ts.old similarity index 59% rename from packages/client/libp2pBrowserBuild/net/test/libp2psender.spec.ts rename to packages/client/libp2pBrowserBuild/net/test/libp2psender.spec.ts.old index 8e61eb4b61..020f6d2a1c 100644 --- a/packages/client/libp2pBrowserBuild/net/test/libp2psender.spec.ts +++ b/packages/client/libp2pBrowserBuild/net/test/libp2psender.spec.ts.old @@ -5,32 +5,30 @@ import { Libp2pSender } from '../../../src/net/protocol' const DuplexPair = 
require('it-pair/duplex') -tape('[Libp2pSender]', (t) => { - t.test('should send/receive status', (t) => { +describe('[Libp2pSender]', () => { + it('should send/receive status', () => { const conn = DuplexPair() const sender = new Libp2pSender(conn[0]) const receiver = new Libp2pSender(conn[1]) receiver.on('status', (status: any) => { - t.equal(bytesToHex(status.id), '05', 'status received') - t.equal(bytesToHex(receiver.status.id), '05', 'status getter') - t.end() + assert.equal(bytesToHex(status.id), '05', 'status received') + assert.equal(bytesToHex(receiver.status.id), '05', 'status getter') }) sender.sendStatus({ id: hexToBytes('0x05') }) }) - t.test('should send/receive message', (t) => { + it('should send/receive message', () => { const conn = DuplexPair() const sender = new Libp2pSender(conn[0]) const receiver = new Libp2pSender(conn[1]) receiver.on('message', (message: any) => { - t.equal(message.code, 1, 'message received (code)') - t.equal(bytesToHex(message.payload), '05', 'message received (payload)') - t.end() + assert.equal(message.code, 1, 'message received (code)') + assert.equal(bytesToHex(message.payload), '05', 'message received (payload)') }) sender.sendMessage(1, hexToBytes('0x05')) }) - t.test('should catch errors', (t) => { + it('should catch errors', () => { const [conn] = DuplexPair() const err0 = { ...conn, @@ -38,7 +36,6 @@ tape('[Libp2pSender]', (t) => { throw new Error('err0') }, } as any - t.throws(() => new Libp2pSender(err0), /err0/, 'catch error') - t.end() + assert.throws(() => new Libp2pSender(err0), /err0/, 'catch error') }) }) diff --git a/packages/client/libp2pBrowserBuild/net/test/libp2pserver.spec.ts b/packages/client/libp2pBrowserBuild/net/test/libp2pserver.spec.ts.old similarity index 74% rename from packages/client/libp2pBrowserBuild/net/test/libp2pserver.spec.ts rename to packages/client/libp2pBrowserBuild/net/test/libp2pserver.spec.ts.old index da23eb3aa4..71a0493084 100644 --- 
a/packages/client/libp2pBrowserBuild/net/test/libp2pserver.spec.ts +++ b/packages/client/libp2pBrowserBuild/net/test/libp2pserver.spec.ts.old @@ -1,7 +1,7 @@ import { bytesToUtf8, utf8ToBytes } from '@ethereumjs/util' import { EventEmitter } from 'events' import { multiaddr } from 'multiaddr' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import * as td from 'testdouble' import { Config } from '../../../src/config' @@ -9,7 +9,7 @@ import { getLogger } from '../../../src/logging' import { Event } from '../../../src/types' import { wait } from '../../../test/integration/util' -tape('[Libp2pServer]', async (t) => { +describe('[Libp2pServer]', async () => { const Libp2pPeer = td.replace('../../../src/net/peer/libp2ppeer') Libp2pPeer.id = 'id0' @@ -41,7 +41,7 @@ tape('[Libp2pServer]', async (t) => { const { Libp2pServer } = await import('../server/libp2pserver') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const multiaddrs = [ multiaddr('/ip4/192.0.2.1/tcp/12345'), @@ -53,32 +53,30 @@ tape('[Libp2pServer]', async (t) => { bootnodes: ['0.0.0.0:3030', '1.1.1.1:3031'], key: utf8ToBytes('abcd'), }) - t.deepEquals((server as any).multiaddrs, multiaddrs, 'multiaddrs correct') - t.deepEquals( + assert.deepEqual((server as any).multiaddrs, multiaddrs, 'multiaddrs correct') + assert.deepEqual( server.bootnodes, [multiaddr('/ip4/0.0.0.0/tcp/3030'), multiaddr('/ip4/1.1.1.1/tcp/3031')], 'bootnodes split' ) - t.equals(bytesToUtf8(server.key!), 'abcd', 'key is correct') - t.equals(server.name, 'libp2p', 'get name') - t.equals( + assert.equal(bytesToUtf8(server.key!), 'abcd', 'key is correct') + assert.equal(server.name, 'libp2p', 'get name') + assert.equal( (await server.getPeerId()).toB58String(), '12D3KooWHnPxZvSVGxToTNaK1xd9z3J1TkQM2S2hLeX4bhraGE64', 'computes correct peerId' ) - t.end() }) - t.test('should 
get peer info', async (t) => { + it('should get peer info', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new Libp2pServer({ config }) const connection = td.object() connection.remotePeer = 'id0' - t.equals(server.getPeerInfo(connection)[0], 'id0', 'got id') - t.end() + assert.equal(server.getPeerInfo(connection)[0], 'id0', 'got id') }) - t.test('should create peer', async (t) => { + it('should create peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const multiaddrs = [multiaddr('/ip4/6.6.6.6')] const server = new Libp2pServer({ config, multiaddrs }) @@ -88,13 +86,11 @@ tape('[Libp2pServer]', async (t) => { }, } as any const peer = server.createPeer(peerId, []) - t.equals(peer.constructor.name, 'Libp2pPeer', 'created peer') - t.equals((server as any).peers.get(peer.id), peer, 'has peer') - t.end() + assert.equal(peer.constructor.name, 'Libp2pPeer', 'created peer') + assert.equal((server as any).peers.get(peer.id), peer, 'has peer') }) - t.test('should start/stop server and test banning', async (t) => { - t.plan(12) + it('should start/stop server and test banning', async () => { const config = new Config({ transports: [], logger: getLogger({ loglevel: 'off' }), @@ -139,35 +135,34 @@ tape('[Libp2pServer]', async (t) => { ;(server as any).peers.set('id', peer) server.addProtocols(protos) config.events.on(Event.SERVER_LISTENING, (info) => - t.deepEquals(info, { transport: 'libp2p', url: 'ma0/p2p/id' }, 'listening') + assert.deepEqual(info, { transport: 'libp2p', url: 'ma0/p2p/id' }, 'listening') ) - config.events.once(Event.PEER_CONNECTED, (p) => t.equals(p, peer, 'peer connected')) - config.events.on(Event.SERVER_ERROR, (err) => t.equals(err.message, 'err0', 'got err0')) - t.notOk(server.ban('peer'), 'unbannable') - t.notOk(await server.stop(), 'not started') + config.events.once(Event.PEER_CONNECTED, (p) => assert.equal(p, peer, 'peer 
connected')) + config.events.on(Event.SERVER_ERROR, (err) => assert.equal(err.message, 'err0', 'got err0')) + assert.notOk(server.ban('peer'), 'unbannable') + assert.notOk(await server.stop(), 'not started') await server.start() ;(server as any).node.emit('error', new Error('err0')) - t.notOk(server.addProtocols([]), 'cannot add protocols after start') + assert.notOk(server.addProtocols([]), 'cannot add protocols after start') server.ban('peer0', 10) - t.ok(server.isBanned('peer0'), 'banned') + assert.ok(server.isBanned('peer0'), 'banned') await wait(100) - t.notOk(server.isBanned('peer0'), 'ban expired') + assert.notOk(server.isBanned('peer0'), 'ban expired') const { node } = server as any - t.equals(node.constructor.name, 'Libp2pNode', 'libp2p node created') + assert.equal(node.constructor.name, 'Libp2pNode', 'libp2p node created') node.emit('peer:discovery', peerId) td.when(peer2.bindProtocols(node, 'id2', server)).thenResolve(null) - server.config.events.once(Event.PEER_CONNECTED, () => t.ok('peer2 connected')) + server.config.events.once(Event.PEER_CONNECTED, () => assert.ok('peer2 connected')) node.emit('peer:discovery', peerId2) td.when(server.getPeerInfo('conn3' as any)).thenReturn([peerId3, 'ma1' as any]) node.connectionManager.emit('peer:connect', 'conn3') td.verify(server.createPeer(peerId3, ['ma1'] as any, td.matchers.anything())) - t.ok((await server.start()) === false, 'server already started') + assert.ok((await server.start()) === false, 'server already started') await server.stop() - t.notOk(server.running, 'stopped') + assert.notOk(server.running, 'stopped') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/package.json b/packages/client/package.json index 7a45b0e231..1068187210 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -49,11 +49,10 @@ "lint:fix": "../../config/cli/lint-fix.sh", "preinstall": "npm run binWorkaround", 
"prepublishOnly": "../../config/cli/prepublish.sh", - "tape": "tape -r ts-node/register", "test": "npm run test:unit && npm run test:integration", - "test:cli": "npm run tape -- 'test/cli/*.spec.ts'", - "test:integration": "npm run tape -- 'test/integration/**/*.spec.ts'", - "test:unit": "npm run tape -- 'test/!(integration|cli|sim)/**/*.spec.ts' 'test/*.spec.ts'", + "test:cli": "npx vitest run ./test/cli/*.spec.ts", + "test:integration": "npx vitest run ./test/integration/*.spec.ts", + "test:unit": "npx vitest run test/* -c=./vitest.config.unit.ts", "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { diff --git a/packages/client/src/rpc/modules/engine.ts b/packages/client/src/rpc/modules/engine.ts index 1ba5c1323a..32c88e3f7a 100644 --- a/packages/client/src/rpc/modules/engine.ts +++ b/packages/client/src/rpc/modules/engine.ts @@ -931,7 +931,7 @@ export class Engine { payloadStatus: { status: Status.INVALID, validationError: null, - latestValidHash: bytesToUnprefixedHex(zeros(32)), + latestValidHash: bytesToHex(zeros(32)), }, payloadId: null, } diff --git a/packages/client/src/sync/beaconsync.ts b/packages/client/src/sync/beaconsync.ts index e2a8a4fb3e..bac2a9014b 100644 --- a/packages/client/src/sync/beaconsync.ts +++ b/packages/client/src/sync/beaconsync.ts @@ -108,7 +108,7 @@ export class BeaconSynchronizer extends Synchronizer { * blockchain. 
Returns null if no valid peer is found */ async best(): Promise { - let best: [Peer, BigInt] | undefined + let best: [Peer, bigint] | undefined const peers = this.pool.peers.filter(this.syncable.bind(this)) if (peers.length < this.config.minPeers && !this.forceSync) return for (const peer of peers) { diff --git a/packages/client/src/util/rpc.ts b/packages/client/src/util/rpc.ts index adbc1d2e94..0ba5c311d3 100644 --- a/packages/client/src/util/rpc.ts +++ b/packages/client/src/util/rpc.ts @@ -1,6 +1,6 @@ import { json as jsonParser } from 'body-parser' -import * as Connect from 'connect' -import * as cors from 'cors' +import Connect from 'connect' +import cors from 'cors' import { createServer } from 'http' import { Server as RPCServer } from 'jayson/promise' import { decode } from 'jwt-simple' @@ -8,10 +8,10 @@ import { inspect } from 'util' import type { Logger } from '../logging' import type { RPCManager } from '../rpc' +import type { IncomingMessage } from 'connect' import type { HttpServer } from 'jayson/promise' import type { TAlgorithm } from 'jwt-simple' -type IncomingMessage = Connect.IncomingMessage const algorithm: TAlgorithm = 'HS256' type CreateRPCServerOpts = { diff --git a/packages/client/test/blockchain/chain.spec.ts b/packages/client/test/blockchain/chain.spec.ts index 66508ddd88..bb6cd12852 100644 --- a/packages/client/test/blockchain/chain.spec.ts +++ b/packages/client/test/blockchain/chain.spec.ts @@ -3,7 +3,7 @@ import { Block } from '@ethereumjs/block' import { Blockchain } from '@ethereumjs/blockchain' import { KeyEncoding, ValueEncoding, bytesToHex, equalsBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' @@ -13,8 +13,8 @@ import type { BlockData, HeaderData } from '@ethereumjs/block' const config = new Config({ accountCache: 10000, storageCache: 1000 }) -tape('[Chain]', (t) => { - t.test('should 
test blockchain DB is initialized', async (t) => { +describe('[Chain]', () => { + it('should test blockchain DB is initialized', async () => { const chain = await Chain.create({ config }) const db = chain.chainDB as LevelDB @@ -29,27 +29,28 @@ tape('[Chain]', (t) => { keyEncoding: KeyEncoding.String, valueEncoding: ValueEncoding.String, }) - t.equal(value, testValue, 'read value matches written value') - t.end() + assert.equal(value, testValue, 'read value matches written value') }) - t.test('should retrieve chain properties', async (t) => { + it('should retrieve chain properties', async () => { const chain = await Chain.create({ config }) await chain.open() - t.equal(chain.networkId, BigInt(1), 'get chain.networkId') - t.equal(chain.blocks.td.toString(10), '17179869184', 'get chain.blocks.td') - t.equal(chain.blocks.height.toString(10), '0', 'get chain.blocks.height') - t.equal( + assert.equal(chain.networkId, BigInt(1), 'get chain.networkId') + assert.equal(chain.blocks.td.toString(10), '17179869184', 'get chain.blocks.td') + assert.equal(chain.blocks.height.toString(10), '0', 'get chain.blocks.height') + assert.equal( bytesToHex(chain.genesis.hash()), '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', 'get chain.genesis' ) - t.ok(equalsBytes(chain.genesis.hash(), chain.blocks.latest!.hash()), 'get chain.block.latest') + assert.ok( + equalsBytes(chain.genesis.hash(), chain.blocks.latest!.hash()), + 'get chain.block.latest' + ) await chain.close() - t.end() }) - t.test('should detect unopened chain', async (t) => { + it('should detect unopened chain', async () => { const blockchain = await Blockchain.create({ validateBlocks: false, validateConsensus: false, @@ -64,65 +65,64 @@ tape('[Chain]', (t) => { common: config.chainCommon, }) - t.equal(await chain.update(), false, 'skip update if not opened') - t.equal(await chain.close(), false, 'skip close if not opened') - t.notOk(chain.opened, 'chain should be closed') - t.notOk(chain.blocks.height, 
'chain should be empty if not opened') + assert.equal(await chain.update(), false, 'skip update if not opened') + assert.equal(await chain.close(), false, 'skip close if not opened') + assert.notOk(chain.opened, 'chain should be closed') + assert.notOk(chain.blocks.height, 'chain should be empty if not opened') try { await chain.putHeaders([block.header]) - t.fail('should error if chain is closed') + assert.fail('should error if chain is closed') } catch (error) { - t.pass('threw an error when chain is closed') + assert.ok(true, 'threw an error when chain is closed') } await chain.close() try { await chain.putBlocks([block]) - t.fail('should error if chain is closed') + assert.fail('should error if chain is closed') } catch (error) { - t.pass('threw an error when chain is closed') + assert.ok(true, 'threw an error when chain is closed') } await chain.close() - t.notOk(chain.opened, 'chain should close') + assert.notOk(chain.opened, 'chain should close') try { await chain.getBlocks(block.hash()) - t.fail('should error if chain is closed') + assert.fail('should error if chain is closed') } catch (error) { - t.pass('threw an error when chain is closed') + assert.ok(true, 'threw an error when chain is closed') } await chain.close() try { await chain.getBlock(block.hash()) - t.fail('should error if chain is closed') + assert.fail('should error if chain is closed') } catch (error) { - t.pass('threw an error when chain is closed') + assert.ok(true, 'threw an error when chain is closed') } try { await chain.getCanonicalHeadHeader() - t.fail('should error if chain is closed') + assert.fail('should error if chain is closed') } catch (error) { - t.pass('threw an error when chain is closed') + assert.ok(true, 'threw an error when chain is closed') } await chain.close() try { await chain.getCanonicalHeadBlock() - t.fail('should error if chain is closed') + assert.fail('should error if chain is closed') } catch (error) { - t.pass('threw an error when chain is closed') + 
assert.ok(true, 'threw an error when chain is closed') } await chain.close() try { await chain.getTd(block.hash(), block.header.number) - t.fail('should error if chain is closed') + assert.fail('should error if chain is closed') } catch (error) { - t.pass('threw an error when chain is closed') + assert.ok(true, 'threw an error when chain is closed') } await chain.open() - t.equal(await chain.open(), false, 'skip open if already opened') + assert.equal(await chain.open(), false, 'skip open if already opened') await chain.close() - t.end() }) - t.test('should add block to chain', async (t) => { + it('should add block to chain', async () => { // TODO: add test cases with activated block validation const blockchain = await Blockchain.create({ validateBlocks: false, @@ -139,9 +139,8 @@ tape('[Chain]', (t) => { common: config.chainCommon, }) await chain.putBlocks([block]) - t.equal(chain.blocks.td.toString(16), '4abcdffff', 'get chain.td') - t.equal(chain.blocks.height.toString(10), '1', 'get chain.height') + assert.equal(chain.blocks.td.toString(16), '4abcdffff', 'get chain.td') + assert.equal(chain.blocks.height.toString(10), '1', 'get chain.height') await chain.close() - t.end() }) }) diff --git a/packages/client/test/cli/cli-rpc.spec.ts b/packages/client/test/cli/cli-rpc.spec.ts index b4771b461c..9858c5eea2 100644 --- a/packages/client/test/cli/cli-rpc.spec.ts +++ b/packages/client/test/cli/cli-rpc.spec.ts @@ -1,23 +1,22 @@ import { spawn } from 'child_process' import { Client } from 'jayson/promise' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import type { ChildProcessWithoutNullStreams } from 'child_process' const cliArgs = ['--rpc', '--ws', '--dev', '--transports=rlpx'] -const end = (child: ChildProcessWithoutNullStreams, hasEnded: boolean, st: tape.Test) => { +const end = (child: ChildProcessWithoutNullStreams, hasEnded: boolean) => { if (hasEnded) return hasEnded = true child.stdout.removeAllListeners() 
child.stderr.removeAllListeners() const res = child.kill('SIGINT') - st.ok(res, 'client shut down successfully') - st.end() + assert.ok(res, 'client shut down successfully') } -tape('[CLI] rpc', (t) => { - t.test('should return valid responses from http and ws endpoints', (st) => { +describe('[CLI] rpc', () => { + it('should return valid responses from http and ws endpoints', () => { const file = require.resolve('../../dist/bin/cli.js') const child = spawn(process.execPath, [file, ...cliArgs]) const hasEnded = false @@ -28,7 +27,7 @@ tape('[CLI] rpc', (t) => { // if http endpoint startup message detected, call http endpoint with RPC method const client = Client.http({ port: 8545 }) const res = await client.request('web3_clientVersion', [], 2.0) - st.ok(res.result.includes('EthereumJS'), 'read from HTTP RPC') + assert.ok(res.result.includes('EthereumJS'), 'read from HTTP RPC') } if (message.includes('ws://')) { @@ -36,28 +35,28 @@ tape('[CLI] rpc', (t) => { const client = Client.websocket({ url: 'ws://localhost:8545' }) ;(client as any).ws.on('open', async function () { const res = await client.request('web3_clientVersion', [], 2.0) - st.ok(res.result.includes('EthereumJS'), 'read from WS RPC') + assert.ok(res.result.includes('EthereumJS'), 'read from WS RPC') ;(client as any).ws.close() - end(child, hasEnded, st) + end(child, hasEnded) }) } }) child.stderr.on('data', (data) => { const message: string = data.toString() - st.fail(`stderr: ${message}`) - end(child, hasEnded, st) + assert.fail(`stderr: ${message}`) + end(child, hasEnded) }) child.on('close', (code) => { if (typeof code === 'number' && code > 0) { - st.fail(`child process exited with code ${code}`) - end(child, hasEnded, st) + assert.fail(`child process exited with code ${code}`) + end(child, hasEnded) } }) }) - t.test('http and ws endpoints should not start when cli args omitted', (st) => { + it('http and ws endpoints should not start when cli args omitted', () => { const file = 
require.resolve('../../dist/bin/cli.js') const rpcDisabledArgs = cliArgs.filter((arg) => !['--rpc', '--ws'].includes(arg)) const child = spawn(process.execPath, [file, ...rpcDisabledArgs]) @@ -66,27 +65,27 @@ tape('[CLI] rpc', (t) => { child.stdout.on('data', async (data) => { const message: string = data.toString() if (message.includes('address=http://')) { - st.fail('http endpoint should not be enabled') + assert.fail('http endpoint should not be enabled') } if (message.includes('address=ws://')) { - st.fail('ws endpoint should not be enabled') + assert.fail('ws endpoint should not be enabled') } if (message.includes('Miner: Assembling block')) { - st.pass('miner started and no rpc endpoints started') - end(child, hasEnded, st) + assert.ok('miner started and no rpc endpoints started') + end(child, hasEnded) } }) child.stderr.on('data', (data) => { const message: string = data.toString() - st.fail(`stderr: ${message}`) - end(child, hasEnded, st) + assert.fail(`stderr: ${message}`) + end(child, hasEnded) }) child.on('close', (code) => { if (typeof code === 'number' && code > 0) { - st.fail(`child process exited with code ${code}`) - end(child, hasEnded, st) + assert.fail(`child process exited with code ${code}`) + end(child, hasEnded) } }) }) diff --git a/packages/client/test/cli/cli-sync.spec.ts b/packages/client/test/cli/cli-sync.spec.ts index 31ee0ae0db..e7b10662d6 100644 --- a/packages/client/test/cli/cli-sync.spec.ts +++ b/packages/client/test/cli/cli-sync.spec.ts @@ -1,58 +1,61 @@ import { spawn } from 'child_process' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' // get args for --network and --syncmode const cliArgs = process.argv.filter( (arg) => arg.startsWith('--network') || arg.startsWith('--sync') ) -tape('[CLI] sync', (t) => { - t.test('should begin downloading blocks', { timeout: 260000 }, (st) => { - const file = require.resolve('../../dist/bin/cli.js') - const child = spawn(process.execPath, [file, ...cliArgs]) - - let 
hasEnded = false - const end = () => { - if (hasEnded) return - hasEnded = true - child.stdout.removeAllListeners() - child.stderr.removeAllListeners() - child.kill('SIGINT') - st.end() - } - - child.stdout.on('data', (data) => { - const message: string = data.toString() - - // log message for easier debugging - // eslint-disable-next-line no-console - console.log(message) - - if (message.toLowerCase().includes('error')) { - st.fail(message) - return end() +describe('[CLI] sync', () => { + it( + 'should begin downloading blocks', + () => { + const file = require.resolve('../../dist/bin/cli.js') + const child = spawn(process.execPath, [file, ...cliArgs]) + + let hasEnded = false + const end = () => { + if (hasEnded) return + hasEnded = true + child.stdout.removeAllListeners() + child.stderr.removeAllListeners() + child.kill('SIGINT') } - if (message.includes('Imported')) { - st.pass('successfully imported blocks or headers') - return end() - } - }) - - child.stderr.on('data', (data) => { - const message: string = data.toString() - if (message.includes('Possible EventEmitter memory leak detected')) { - // This is okay. - return - } - st.fail(`stderr: ${message}`) - end() - }) - child.on('close', (code) => { - if (typeof code === 'number' && code > 0) { - st.fail(`child process exited with code ${code}`) + child.stdout.on('data', (data) => { + const message: string = data.toString() + + // log message for easier debugging + // eslint-disable-next-line no-console + console.log(message) + + if (message.toLowerCase().includes('error')) { + assert.fail(message) + return end() + } + if (message.includes('Imported')) { + assert.ok(true, 'successfully imported blocks or headers') + return end() + } + }) + + child.stderr.on('data', (data) => { + const message: string = data.toString() + if (message.includes('Possible EventEmitter memory leak detected')) { + // This is okay. 
+ return + } + assert.fail(`stderr: ${message}`) end() - } - }) - }) + }) + + child.on('close', (code) => { + if (typeof code === 'number' && code > 0) { + assert.fail(`child process exited with code ${code}`) + end() + } + }) + }, + { timeout: 260000 } + ) }) diff --git a/packages/client/test/client.spec.ts b/packages/client/test/client.spec.ts index 30328db128..f596ab0884 100644 --- a/packages/client/test/client.spec.ts +++ b/packages/client/test/client.spec.ts @@ -1,10 +1,12 @@ -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' +import { EthereumClient } from '../src/client' import { Config } from '../src/config' import { PeerPool } from '../src/net/peerpool' +import { RlpxServer } from '../src/net/server' -tape('[EthereumClient]', async (t) => { +describe('[EthereumClient]', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) class FullEthereumService { open() {} @@ -36,40 +38,51 @@ tape('[EthereumClient]', async (t) => { td.when(Server.prototype.stop()).thenResolve() td.when(Server.prototype.bootstrap()).thenResolve() - const { EthereumClient } = await import('../src/client') + // const { EthereumClient } = await import('../src/client') - t.test('should initialize correctly', async (t) => { - const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) - const client = await EthereumClient.create({ config }) - t.ok(client.services[0] instanceof FullEthereumService, 'added service') - t.end() - }) + it( + 'should initialize correctly', + async () => { + const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) + const client = await EthereumClient.create({ config }) + assert.ok('lightserv' in client.services[0], 'added FullEthereumService') + assert.ok('execution' in client.services[0], 'added FullEthereumService') + assert.ok('txPool' in client.services[0], 'added FullEthereumService') + }, + { 
timeout: 30000 } + ) - t.test('should open', async (t) => { - t.plan(2) - const servers = [new Server()] as any - const config = new Config({ servers, accountCache: 10000, storageCache: 1000 }) - const client = await EthereumClient.create({ config }) + it( + 'should open', + async () => { + const servers = [new RlpxServer({ config: new Config() })] + const config = new Config({ servers, accountCache: 10000, storageCache: 1000 }) + const client = await EthereumClient.create({ config }) - await client.open() - t.ok(client.opened, 'opened') - t.equals(await client.open(), false, 'already opened') - }) + await client.open() + assert.ok(client.opened, 'opened') + assert.equal(await client.open(), false, 'already opened') + }, + { timeout: 15000 } + ) - t.test('should start/stop', async (t) => { - const servers = [new Server()] as any - const config = new Config({ servers, accountCache: 10000, storageCache: 1000 }) - const client = await EthereumClient.create({ config }) - await client.start() - t.ok(client.started, 'started') - t.equals(await client.start(), false, 'already started') - await client.stop() - t.notOk(client.started, 'stopped') - t.equals(await client.stop(), false, 'already stopped') - }) + it( + 'should start/stop', + async () => { + const servers = [new Server()] as any + const config = new Config({ servers, accountCache: 10000, storageCache: 1000 }) + const client = await EthereumClient.create({ config }) + await client.start() + assert.ok(client.started, 'started') + assert.equal(await client.start(), false, 'already started') + await client.stop() + assert.notOk(client.started, 'stopped') + assert.equal(await client.stop(), false, 'already stopped') + }, + { timeout: 10000 } + ) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/config.spec.ts b/packages/client/test/config.spec.ts index 0f56d611a5..60dddddc9a 100644 --- a/packages/client/test/config.spec.ts +++ 
b/packages/client/test/config.spec.ts @@ -1,86 +1,77 @@ import { Chain, Common } from '@ethereumjs/common' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Config, DataDirectory } from '../src/config' -tape('[Config]', (t) => { - t.test('Initialization with default parameters', (t) => { +describe('[Config]', () => { + it('Initialization with default parameters', () => { const config = new Config() - t.equal(config.maxPeers, 25) - t.end() + assert.equal(config.maxPeers, 25) }) - t.test('Initialization with parameters passed', (t) => { + it('Initialization with parameters passed', () => { const config = new Config({ maxPeers: 10, accountCache: 10000, storageCache: 1000 }) - t.equal(config.maxPeers, 10) - t.end() + assert.equal(config.maxPeers, 10) }) - t.test('Chain data default directory', (t) => { + it('Chain data default directory', () => { const config = new Config() - t.equal(config.getDataDirectory(DataDirectory.Chain), './datadir/mainnet/chain') - t.end() + assert.equal(config.getDataDirectory(DataDirectory.Chain), './datadir/mainnet/chain') }) - t.test('State data default directory', (t) => { + it('State data default directory', () => { const config = new Config() - t.equal(config.getDataDirectory(DataDirectory.State), './datadir/mainnet/state') - t.end() + assert.equal(config.getDataDirectory(DataDirectory.State), './datadir/mainnet/state') }) - t.test('Meta data default directory', (t) => { + it('Meta data default directory', () => { const config = new Config() - t.equal(config.getDataDirectory(DataDirectory.Meta), './datadir/mainnet/meta') - t.end() + assert.equal(config.getDataDirectory(DataDirectory.Meta), './datadir/mainnet/meta') }) - t.test('peer discovery default mainnet setting', (t) => { + it('peer discovery default mainnet setting', () => { const common = new Common({ chain: Chain.Mainnet }) const config = new Config({ common, accountCache: 10000, storageCache: 1000 }) - t.equal(config.discDns, false, 'disables 
DNS peer discovery for mainnet') - t.equal(config.discV4, true, 'enables DNS peer discovery for mainnet') - t.end() + assert.equal(config.discDns, false, 'disables DNS peer discovery for mainnet') + assert.equal(config.discV4, true, 'enables DNS peer discovery for mainnet') }) - t.test('peer discovery default testnet settings', (t) => { + it('peer discovery default testnet settings', () => { let config for (const chain of [Chain.Goerli, Chain.Sepolia]) { const common = new Common({ chain }) config = new Config({ common }) - t.equal(config.discDns, true, `enables DNS peer discovery for ${chain}`) - t.equal(config.discV4, false, `disables V4 peer discovery for ${chain}`) + assert.equal(config.discDns, true, `enables DNS peer discovery for ${chain}`) + assert.equal(config.discV4, false, `disables V4 peer discovery for ${chain}`) } - t.end() }) - t.test('--discDns=true/false', (t) => { + it('--discDns=true/false', () => { let common, config, chain chain = Chain.Mainnet common = new Common({ chain }) config = new Config({ common, discDns: true }) - t.equal(config.discDns, true, `default discDns setting can be overridden to true`) + assert.equal(config.discDns, true, `default discDns setting can be overridden to true`) chain = Chain.Goerli common = new Common({ chain }) config = new Config({ common, discDns: false }) - t.equal(config.discDns, false, `default discDns setting can be overridden to false`) - t.end() + assert.equal(config.discDns, false, `default discDns setting can be overridden to false`) }) - t.test('--discV4=true/false', (t) => { + it('--discV4=true/false', () => { let common, config, chain chain = Chain.Mainnet common = new Common({ chain }) config = new Config({ common, discV4: false }) - t.equal(config.discDns, false, `default discV4 setting can be overridden to false`) + assert.equal(config.discDns, false, `default discV4 setting can be overridden to false`) chain = Chain.Goerli common = new Common({ chain }) config = new Config({ common, discV4: true 
}) - t.equal(config.discDns, true, `default discV4 setting can be overridden to true`) - t.end() + assert.equal(config.discDns, true, `default discV4 setting can be overridden to true`) }) }) diff --git a/packages/client/test/execution/vmexecution.spec.ts b/packages/client/test/execution/vmexecution.spec.ts index 4b87eb0455..f145a5d2d9 100644 --- a/packages/client/test/execution/vmexecution.spec.ts +++ b/packages/client/test/execution/vmexecution.spec.ts @@ -3,25 +3,24 @@ import { Blockchain } from '@ethereumjs/blockchain' import { Chain as ChainEnum, Common, Hardfork } from '@ethereumjs/common' import { bytesToHex } from '@ethereumjs/util' import { VM } from '@ethereumjs/vm' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' import { VMExecution } from '../../src/execution' import { closeRPC, setupChain } from '../rpc/helpers' -import * as blocksDataGoerli from '../testdata/blocks/goerli.json' -import * as blocksDataMainnet from '../testdata/blocks/mainnet.json' -import * as testnet from '../testdata/common/testnet.json' -import * as shanghaiJSON from '../testdata/geth-genesis/withdrawals.json' +import blocksDataGoerli from '../testdata/blocks/goerli.json' +import blocksDataMainnet from '../testdata/blocks/mainnet.json' +import testnet from '../testdata/common/testnet.json' +import shanghaiJSON from '../testdata/geth-genesis/withdrawals.json' -tape('[VMExecution]', async (t) => { - t.test('Initialization', async (t) => { +describe('[VMExecution]', async () => { + it('Initialization', async () => { const vm = await VM.create() const config = new Config({ vm, transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const exec = new VMExecution({ config, chain }) - t.equals(exec.vm, vm, 'should use vm provided') - t.end() + assert.equal(exec.vm, vm, 'should use vm provided') }) async function 
testSetup(blockchain: Blockchain, common?: Common) { @@ -33,76 +32,86 @@ tape('[VMExecution]', async (t) => { return exec } - t.test('Block execution / Hardforks PoW (mainnet)', async (t) => { - let blockchain = await Blockchain.create({ - validateBlocks: true, - validateConsensus: false, - }) - let exec = await testSetup(blockchain) - const oldHead = await exec.vm.blockchain.getIteratorHead!() - await exec.run() - let newHead = await exec.vm.blockchain.getIteratorHead!() - t.deepEqual(newHead.hash(), oldHead.hash(), 'should not modify blockchain on empty run') - - blockchain = await Blockchain.fromBlocksData(blocksDataMainnet, { - validateBlocks: true, - validateConsensus: false, - }) - exec = await testSetup(blockchain) - await exec.run() - newHead = await exec.vm.blockchain.getIteratorHead!() - t.equals(newHead.header.number, BigInt(5), 'should run all blocks') - - const common = new Common({ chain: 'testnet', customChains: [testnet] }) - exec = await testSetup(blockchain, common) - await exec.run() - t.equal(exec.hardfork, 'byzantium', 'should update HF on block run') - - t.end() - }) - - t.test('Test block execution using executeBlocks function', async (t) => { - let blockchain = await Blockchain.create({ - validateBlocks: true, - validateConsensus: false, - }) - let exec = await testSetup(blockchain) - - blockchain = await Blockchain.fromBlocksData(blocksDataMainnet, { - validateBlocks: true, - validateConsensus: false, - }) - exec = await testSetup(blockchain) - await exec.run() - - t.doesNotThrow(async () => exec.executeBlocks(1, 5, []), 'blocks should execute without error') - - t.end() - }) - - t.test('Should fail opening if vmPromise already assigned', async (t) => { - const blockchain = await Blockchain.create({ - validateBlocks: true, - validateConsensus: false, - }) - const exec = await testSetup(blockchain) - t.equal(exec.started, true, 'execution should be opened') - await exec.stop() - t.equal(exec.started, false, 'execution should be stopped') - 
exec['vmPromise'] = (async () => 0)() - await exec.open() - t.equal(exec.started, false, 'execution should be stopped') - exec['vmPromise'] = undefined - await exec.open() - t.equal(exec.started, true, 'execution should be restarted') - exec['vmPromise'] = (async () => 0)() - await exec.stop() - t.equal(exec.started, false, 'execution should be restopped') - t.equal(exec['vmPromise'], undefined, 'vmPromise should be reset') - t.end() - }) + it( + 'Block execution / Hardforks PoW (mainnet)', + async () => { + let blockchain = await Blockchain.create({ + validateBlocks: true, + validateConsensus: false, + }) + let exec = await testSetup(blockchain) + const oldHead = await exec.vm.blockchain.getIteratorHead!() + await exec.run() + let newHead = await exec.vm.blockchain.getIteratorHead!() + assert.deepEqual(newHead.hash(), oldHead.hash(), 'should not modify blockchain on empty run') + + blockchain = await Blockchain.fromBlocksData(blocksDataMainnet, { + validateBlocks: true, + validateConsensus: false, + }) + exec = await testSetup(blockchain) + await exec.run() + newHead = await exec.vm.blockchain.getIteratorHead!() + assert.equal(newHead.header.number, BigInt(5), 'should run all blocks') + + const common = new Common({ chain: 'testnet', customChains: [testnet] }) + exec = await testSetup(blockchain, common) + await exec.run() + assert.equal(exec.hardfork, 'byzantium', 'should update HF on block run') + }, + { timeout: 60000 } + ) + + it( + 'Test block execution using executeBlocks function', + async () => { + let blockchain = await Blockchain.create({ + validateBlocks: true, + validateConsensus: false, + }) + let exec = await testSetup(blockchain) + + blockchain = await Blockchain.fromBlocksData(blocksDataMainnet, { + validateBlocks: true, + validateConsensus: false, + }) + exec = await testSetup(blockchain) + await exec.run() + + assert.doesNotThrow( + async () => exec.executeBlocks(1, 5, []), + 'blocks should execute without error' + ) + }, + { timeout: 60000 } + ) 
+ + it( + 'Should fail opening if vmPromise already assigned', + async () => { + const blockchain = await Blockchain.create({ + validateBlocks: true, + validateConsensus: false, + }) + const exec = await testSetup(blockchain) + assert.equal(exec.started, true, 'execution should be opened') + await exec.stop() + assert.equal(exec.started, false, 'execution should be stopped') + exec['vmPromise'] = (async () => 0)() + await exec.open() + assert.equal(exec.started, false, 'execution should be stopped') + exec['vmPromise'] = undefined + await exec.open() + assert.equal(exec.started, true, 'execution should be restarted') + exec['vmPromise'] = (async () => 0)() + await exec.stop() + assert.equal(exec.started, false, 'execution should be restopped') + assert.equal(exec['vmPromise'], undefined, 'vmPromise should be reset') + }, + { timeout: 30000 } + ) - t.test('Block execution / Hardforks PoA (goerli)', async (t) => { + it('Block execution / Hardforks PoA (goerli)', async () => { const common = new Common({ chain: ChainEnum.Goerli, hardfork: Hardfork.Chainstart }) let blockchain = await Blockchain.create({ validateBlocks: true, @@ -113,7 +122,7 @@ tape('[VMExecution]', async (t) => { const oldHead = await exec.vm.blockchain.getIteratorHead!() await exec.run() let newHead = await exec.vm.blockchain.getIteratorHead!() - t.deepEqual(newHead.hash(), oldHead.hash(), 'should not modify blockchain on empty run') + assert.deepEqual(newHead.hash(), oldHead.hash(), 'should not modify blockchain on empty run') blockchain = await Blockchain.fromBlocksData(blocksDataGoerli, { validateBlocks: true, @@ -123,12 +132,10 @@ tape('[VMExecution]', async (t) => { exec = await testSetup(blockchain, common) await exec.run() newHead = await exec.vm.blockchain.getIteratorHead!() - t.equals(newHead.header.number, BigInt(7), 'should run all blocks') - - t.end() + assert.equal(newHead.header.number, BigInt(7), 'should run all blocks') }) - t.test('Block execution / Hardforks PoA (goerli)', async 
(t) => { + it('Block execution / Hardforks PoA (goerli)', async () => { const { server, execution, blockchain } = await setupChain(shanghaiJSON, 'post-merge', { engine: true, }) @@ -144,7 +151,7 @@ tape('[VMExecution]', async (t) => { await execution.run() let newHead = await blockchain.getIteratorHead() - t.equal( + assert.equal( bytesToHex(block.hash()), bytesToHex(newHead.hash()), 'vmHead should be on the latest block' @@ -153,7 +160,7 @@ tape('[VMExecution]', async (t) => { // reset head and run again await blockchain.setIteratorHead('vm', oldHead.hash()) newHead = await blockchain.getIteratorHead() - t.equal( + assert.equal( bytesToHex(oldHead.hash()), bytesToHex(newHead.hash()), 'vmHead should be on the latest block' @@ -161,14 +168,13 @@ tape('[VMExecution]', async (t) => { await execution.run() newHead = await blockchain.getIteratorHead() - t.equal( + assert.equal( bytesToHex(block.hash()), bytesToHex(newHead.hash()), 'vmHead should be on the latest block' ) closeRPC(server) - t.end() }) }) diff --git a/packages/client/test/integration/beaconsync.spec.ts b/packages/client/test/integration/beaconsync.spec.ts index a7a5a6994a..c0547c95dc 100644 --- a/packages/client/test/integration/beaconsync.spec.ts +++ b/packages/client/test/integration/beaconsync.spec.ts @@ -1,21 +1,21 @@ import { BlockHeader } from '@ethereumjs/block' import { Common } from '@ethereumjs/common' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Event } from '../../src/types' -import * as genesisJSON from '../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../testdata/geth-genesis/post-merge.json' import { destroy, setup, wait } from './util' const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation -tape('[Integration:BeaconSync]', async (t) => { +describe('[Integration:BeaconSync]', () => { const common = Common.fromGethGenesis(genesisJSON, { chain: 'post-merge' }) 
common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) - t.test('should sync blocks', async (t) => { - ;(BlockHeader as any).prototype._consensusFormatValidation = td.func() + it('should sync blocks', async () => { + BlockHeader.prototype['_consensusFormatValidation'] = td.func() td.replace('@ethereumjs/block', { BlockHeader }) const [remoteServer, remoteService] = await setup({ location: '127.0.0.2', height: 20, common }) @@ -31,27 +31,27 @@ tape('[Integration:BeaconSync]', async (t) => { await localService.synchronizer!.stop() await localServer.discover('remotePeer1', '127.0.0.2') localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - t.equals(localService.chain.blocks.height, BigInt(20), 'synced') + assert.equal(localService.chain.blocks.height, BigInt(20), 'synced') await destroy(localServer, localService) await destroy(remoteServer, remoteService) }) await localService.synchronizer!.start() }) - t.test('should not sync with stale peers', async (t) => { + it('should not sync with stale peers', async () => { const [remoteServer, remoteService] = await setup({ location: '127.0.0.2', height: 9, common }) const [localServer, localService] = await setup({ location: '127.0.0.1', height: 10, common }) localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - t.fail('synced with a stale peer') + assert.fail('synced with a stale peer') }) await localServer.discover('remotePeer', '127.0.0.2') await wait(300) await destroy(localServer, localService) await destroy(remoteServer, remoteService) - t.pass('did not sync') + assert.ok(true, 'did not sync') }) - t.test('should sync with best peer', async (t) => { + it('should sync with best peer', async () => { const [remoteServer1, remoteService1] = await setup({ location: '127.0.0.2', height: 7, @@ -82,7 +82,7 @@ tape('[Integration:BeaconSync]', async (t) => { localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { if (localService.chain.blocks.height === BigInt(10)) { - 
t.pass('synced with best peer') + assert.ok(true, 'synced with best peer') await destroy(localServer, localService) await destroy(remoteServer1, remoteService1) await destroy(remoteServer2, remoteService2) @@ -90,10 +90,9 @@ tape('[Integration:BeaconSync]', async (t) => { }) await localService.synchronizer!.start() }) -}) +}, 30000) -tape('reset TD', (t) => { - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate +it('reset TD', () => { + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate td.reset() - t.end() }) diff --git a/packages/client/test/integration/cli.spec.ts b/packages/client/test/integration/cli.spec.ts index 0e11cd5ee9..df48c1216d 100644 --- a/packages/client/test/integration/cli.spec.ts +++ b/packages/client/test/integration/cli.spec.ts @@ -1,15 +1,14 @@ import { spawn } from 'child_process' -import * as tape from 'tape' - -tape('[CLI]', (t) => { - t.test('should start up client and execute blocks blocks', { timeout: 300000 }, (t) => { +import { assert, describe, it } from 'vitest' +describe('[CLI]', () => { + it('should start up client and execute blocks blocks', () => { const file = require.resolve('../../dist/bin/cli.js') const child = spawn(process.execPath, [file, '--dev=poa']) let hasEnded = false const timeout = setTimeout(() => { - t.fail('timed out before finishing') + assert.fail('timed out before finishing') // eslint-disable-next-line @typescript-eslint/no-use-before-define end() }, 240000) @@ -19,7 +18,6 @@ tape('[CLI]', (t) => { if (!hasEnded) { hasEnded = true child.kill('SIGINT') - t.end() } } @@ -30,25 +28,22 @@ tape('[CLI]', (t) => { console.log(message) if (message.toLowerCase().includes('error') === true) { - t.fail(message) - return end() + assert.fail(message) } if (message.includes('Executed') === true) { - t.pass('successfully executed blocks') + assert.ok(true, 'successfully executed blocks') return end() } }) child.stderr.on('data', (data) => { const message = data.toString() - 
t.fail(`stderr: ${message}`) - end() + assert.fail(`stderr: ${message}`) }) child.on('close', (code) => { if (code !== null && code > 0) { - t.fail(`child process exited with code ${code}`) - end() + assert.fail(`child process exited with code ${code}`) } }) }) diff --git a/packages/client/test/integration/client.spec.ts b/packages/client/test/integration/client.spec.ts index d0ce2b3d79..4e863da9a4 100644 --- a/packages/client/test/integration/client.spec.ts +++ b/packages/client/test/integration/client.spec.ts @@ -1,4 +1,4 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { EthereumClient } from '../../src/client' import { Config, SyncMode } from '../../src/config' @@ -6,7 +6,7 @@ import { Event } from '../../src/types' import { MockServer } from './mocks/mockserver' -tape('[Integration:EthereumClient]', async (t) => { +describe('[Integration:EthereumClient]', async () => { const serverConfig = new Config({ accountCache: 10000, storageCache: 1000 }) const servers = [new MockServer({ config: serverConfig }) as any] const config = new Config({ @@ -21,18 +21,27 @@ tape('[Integration:EthereumClient]', async (t) => { ;(config.servers[0].config as any).events = config.events const client = await EthereumClient.create({ config }) - t.test('should start/stop', async (t) => { - t.plan(4) - client.config.events.on(Event.SERVER_ERROR, (err) => t.equal(err.message, 'err0', 'got error')) - client.config.events.on(Event.SERVER_LISTENING, (details: any) => { - t.deepEqual(details, { transport: 'mock', url: 'mock://127.0.0.1' }, 'server listening') - }) - await client.open() - ;(client.service('eth') as any).interval = 100 - client.config.events.emit(Event.SERVER_ERROR, new Error('err0'), client.config.servers[0]) - await client.start() - t.ok((client.service('eth') as any).synchronizer.running, 'sync running') - await client.stop() - t.pass('client stopped') - }) + it( + 'should start/stop', + async () => { + 
client.config.events.on(Event.SERVER_ERROR, (err) => + assert.equal(err.message, 'err0', 'got error') + ) + client.config.events.on(Event.SERVER_LISTENING, (details: any) => { + assert.deepEqual( + details, + { transport: 'mock', url: 'mock://127.0.0.1' }, + 'server listening' + ) + }) + await client.open() + ;(client.service('eth') as any).interval = 100 + client.config.events.emit(Event.SERVER_ERROR, new Error('err0'), client.config.servers[0]) + await client.start() + assert.ok((client.service('eth') as any).synchronizer.running, 'sync running') + await client.stop() + assert.ok(true, 'client stopped') + }, + { timeout: 20000 } + ) }) diff --git a/packages/client/test/integration/fullethereumservice.spec.ts b/packages/client/test/integration/fullethereumservice.spec.ts index 9dd7882c0f..78bd7c6db3 100644 --- a/packages/client/test/integration/fullethereumservice.spec.ts +++ b/packages/client/test/integration/fullethereumservice.spec.ts @@ -4,8 +4,8 @@ import { Hardfork } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' import { Account, bytesToHex, equalsBytes, hexToBytes, toBytes } from '@ethereumjs/util' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Config } from '../../src/config' import { FullEthereumService } from '../../src/service' @@ -17,7 +17,7 @@ import { destroy } from './util' const config = new Config({ accountCache: 10000, storageCache: 1000 }) -tape('[Integration:FullEthereumService]', async (t) => { +describe('[Integration:FullEthereumService]', async () => { // Stub out setStateRoot since correct state root doesn't exist in mock state. 
const ogSetStateRoot = DefaultStateManager.prototype.setStateRoot DefaultStateManager.prototype.setStateRoot = (): any => {} @@ -46,68 +46,71 @@ tape('[Integration:FullEthereumService]', async (t) => { return [server, service] } - t.test('should handle ETH requests', async (t) => { - t.plan(8) - const [server, service] = await setup() - const peer = await server.accept('peer0') - const [reqId1, headers] = await peer.eth!.getBlockHeaders({ block: BigInt(1), max: 2 }) - const hash = hexToBytes('0xa321d27cd2743617c1c1b0d7ecb607dd14febcdfca8f01b79c3f0249505ea069') - t.equal(reqId1, BigInt(1), 'handled GetBlockHeaders') - t.ok(equalsBytes(headers![1].hash(), hash), 'handled GetBlockHeaders') - const res = await peer.eth!.getBlockBodies({ hashes: [hash] }) - const [reqId2, bodies] = res - t.equal(reqId2, BigInt(2), 'handled GetBlockBodies') - t.deepEquals(bodies, [[[], []]], 'handled GetBlockBodies') - service.config.events.on(Event.PROTOCOL_MESSAGE, async (msg) => { - switch (msg.name) { - case 'NewBlockHashes': { - t.pass('handled NewBlockHashes') - break + it( + 'should handle ETH requests', + async () => { + const [server, service] = await setup() + const peer = await server.accept('peer0') + const [reqId1, headers] = await peer.eth!.getBlockHeaders({ block: BigInt(1), max: 2 }) + const hash = hexToBytes('0xa321d27cd2743617c1c1b0d7ecb607dd14febcdfca8f01b79c3f0249505ea069') + assert.equal(reqId1, BigInt(1), 'handled GetBlockHeaders') + assert.ok(equalsBytes(headers![1].hash(), hash), 'handled GetBlockHeaders') + const res = await peer.eth!.getBlockBodies({ hashes: [hash] }) + const [reqId2, bodies] = res + assert.equal(reqId2, BigInt(2), 'handled GetBlockBodies') + assert.deepEqual(bodies, [[[], []]], 'handled GetBlockBodies') + service.config.events.on(Event.PROTOCOL_MESSAGE, async (msg) => { + switch (msg.name) { + case 'NewBlockHashes': { + assert.ok(true, 'handled NewBlockHashes') + break + } + case 'NewBlock': { + assert.ok(true, 'handled NewBlock') + await 
destroy(server, service) + break + } } - case 'NewBlock': { - t.pass('handled NewBlock') - await destroy(server, service) - break - } - } - }) - peer.eth!.send('NewBlockHashes', [[hash, BigInt(2)]]) + }) + peer.eth!.send('NewBlockHashes', [[hash, BigInt(2)]]) - const block = Block.fromBlockData( - { - header: { - number: 1, - difficulty: 1, + const block = Block.fromBlockData( + { + header: { + number: 1, + difficulty: 1, + }, }, - }, - { common: config.chainCommon } - ) - peer.eth!.send('NewBlock', [block, BigInt(1)]) + { common: config.chainCommon } + ) + peer.eth!.send('NewBlock', [block, BigInt(1)]) - const txData = - '0x02f901100180843b9aca00843b9aca008402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const tx = FeeMarketEIP1559Transaction.fromSerializedTx(toBytes(txData)) - await service.execution.vm.stateManager.putAccount( - tx.getSenderAddress(), - new Account(BigInt(0), BigInt('40000000000100000')) - ) - await service.txPool.add(tx) - service.config.chainCommon.getHardforkBy = td.func() - td.when(service.config.chainCommon.getHardforkBy(td.matchers.anything())).thenReturn( - Hardfork.London - ) - const [_, txs] = await peer.eth!.getPooledTransactions({ hashes: [tx.hash()] }) - t.ok(equalsBytes(txs[0].hash(), tx.hash()), 'handled GetPooledTransactions') + const txData = + 
'0x02f901100180843b9aca00843b9aca008402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' + const tx = FeeMarketEIP1559Transaction.fromSerializedTx(toBytes(txData)) + await service.execution.vm.stateManager.putAccount( + tx.getSenderAddress(), + new Account(BigInt(0), BigInt('40000000000100000')) + ) + await service.txPool.add(tx) + service.config.chainCommon.getHardforkBy = td.func() + td.when(service.config.chainCommon.getHardforkBy(td.matchers.anything())).thenReturn( + Hardfork.London + ) + const [_, txs] = await peer.eth!.getPooledTransactions({ hashes: [tx.hash()] }) + assert.ok(equalsBytes(txs[0].hash(), tx.hash()), 'handled GetPooledTransactions') - peer.eth!.send('Transactions', [tx]) - t.pass('handled Transactions') - }) + peer.eth!.send('Transactions', [tx]) + assert.ok(true, 'handled Transactions') + }, + { timeout: 30000 } + ) - t.test('should handle LES requests', async (t) => { + it('should handle LES requests', async () => { const [server, service] = await setup() const peer = await server.accept('peer0') const { headers } = await peer.les!.getBlockHeaders({ block: BigInt(1), max: 2 }) - t.equals( + assert.equal( bytesToHex(headers[1].hash()), '0xa321d27cd2743617c1c1b0d7ecb607dd14febcdfca8f01b79c3f0249505ea069', 'handled GetBlockHeaders' @@ -117,5 +120,5 @@ tape('[Integration:FullEthereumService]', async (t) => { // unstub setStateRoot DefaultStateManager.prototype.setStateRoot = ogSetStateRoot DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy - }) + }, 30000) }) diff 
--git a/packages/client/test/integration/fullsync.spec.ts b/packages/client/test/integration/fullsync.spec.ts index 7340c1ca2d..f74cd96aa3 100644 --- a/packages/client/test/integration/fullsync.spec.ts +++ b/packages/client/test/integration/fullsync.spec.ts @@ -1,38 +1,38 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Event } from '../../src/types' import { destroy, setup, wait } from './util' -tape('[Integration:FullSync]', async (t) => { - t.test('should sync blocks', async (t) => { +describe('[Integration:FullSync]', async () => { + it('should sync blocks', async () => { const [remoteServer, remoteService] = await setup({ location: '127.0.0.2', height: 20 }) const [localServer, localService] = await setup({ location: '127.0.0.1', height: 0 }) await localService.synchronizer!.stop() await localServer.discover('remotePeer1', '127.0.0.2') // await localService.synchronizer.sync() localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - t.equals(localService.chain.blocks.height, BigInt(20), 'synced') + assert.equal(localService.chain.blocks.height, BigInt(20), 'synced') await destroy(localServer, localService) await destroy(remoteServer, remoteService) }) await localService.synchronizer!.start() - }) + }, 30000) - t.test('should not sync with stale peers', async (t) => { + it('should not sync with stale peers', async () => { const [remoteServer, remoteService] = await setup({ location: '127.0.0.2', height: 9 }) const [localServer, localService] = await setup({ location: '127.0.0.1', height: 10 }) localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - t.fail('synced with a stale peer') + assert.fail('synced with a stale peer') }) await localServer.discover('remotePeer', '127.0.0.2') await wait(300) await destroy(localServer, localService) await destroy(remoteServer, remoteService) - t.pass('did not sync') - }) + assert.ok(true, 'did not sync') + }, 30000) - t.test('should sync with best peer', 
async (t) => { + it('should sync with best peer', async () => { const [remoteServer1, remoteService1] = await setup({ location: '127.0.0.2', height: 7 }) const [remoteServer2, remoteService2] = await setup({ location: '127.0.0.3', height: 10 }) const [localServer, localService] = await setup({ @@ -46,12 +46,12 @@ tape('[Integration:FullSync]', async (t) => { localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { if (localService.chain.blocks.height === BigInt(10)) { - t.pass('synced with best peer') + assert.ok(true, 'synced with best peer') await destroy(localServer, localService) await destroy(remoteServer1, remoteService1) await destroy(remoteServer2, remoteService2) } }) await localService.synchronizer!.start() - }) + }, 30000) }) diff --git a/packages/client/test/integration/lightethereumservice.spec.ts b/packages/client/test/integration/lightethereumservice.spec.ts deleted file mode 100644 index a293048307..0000000000 --- a/packages/client/test/integration/lightethereumservice.spec.ts +++ /dev/null @@ -1,12 +0,0 @@ -import * as tape from 'tape' - -import { destroy, setup } from './util' - -tape('[Integration:LightEthereumService]', async (t) => { - t.test('should handle LES requests', async (t) => { - const [server, service] = await setup() - // TO DO: test handlers once they are implemented - await destroy(server, service) - t.end() - }) -}) diff --git a/packages/client/test/integration/lightsync.spec.ts b/packages/client/test/integration/lightsync.spec.ts index 2068b48b42..54b08d0687 100644 --- a/packages/client/test/integration/lightsync.spec.ts +++ b/packages/client/test/integration/lightsync.spec.ts @@ -1,80 +1,92 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { SyncMode } from '../../src/config' import { Event } from '../../src/types' import { destroy, setup, wait } from './util' -tape('[Integration:LightSync]', async (t) => { - t.test('should sync headers', async (t) => { - const [remoteServer, 
remoteService] = await setup({ - location: '127.0.0.2', - height: 20, - syncmode: SyncMode.Full, - }) - const [localServer, localService] = await setup({ - location: '127.0.0.1', - height: 0, - syncmode: SyncMode.Light, - }) - await localService.synchronizer!.stop() - await localServer.discover('remotePeer1', '127.0.0.2') - localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - t.equals(localService.chain.headers.height, BigInt(20), 'synced') +describe('[Integration:LightSync]', async () => { + it( + 'should sync headers', + async () => { + const [remoteServer, remoteService] = await setup({ + location: '127.0.0.2', + height: 20, + syncmode: SyncMode.Full, + }) + const [localServer, localService] = await setup({ + location: '127.0.0.1', + height: 0, + syncmode: SyncMode.Light, + }) + await localService.synchronizer!.stop() + await localServer.discover('remotePeer1', '127.0.0.2') + localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { + assert.equal(localService.chain.headers.height, BigInt(20), 'synced') + await destroy(localServer, localService) + await destroy(remoteServer, remoteService) + }) + await localService.synchronizer!.start() + }, + { timeout: 30000 } + ) + + it( + 'should not sync with stale peers', + async () => { + const [remoteServer, remoteService] = await setup({ + location: '127.0.0.2', + height: 9, + syncmode: SyncMode.Full, + }) + const [localServer, localService] = await setup({ + location: '127.0.0.1', + height: 10, + syncmode: SyncMode.Light, + }) + localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { + assert.fail('synced with a stale peer') + }) + await localServer.discover('remotePeer', '127.0.0.2') + await wait(100) await destroy(localServer, localService) await destroy(remoteServer, remoteService) - }) - await localService.synchronizer!.start() - }) + assert.ok(true, 'did not sync') + }, + { timeout: 30000 } + ) - t.test('should not sync with stale peers', async (t) => { - const 
[remoteServer, remoteService] = await setup({ - location: '127.0.0.2', - height: 9, - syncmode: SyncMode.Full, - }) - const [localServer, localService] = await setup({ - location: '127.0.0.1', - height: 10, - syncmode: SyncMode.Light, - }) - localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - t.fail('synced with a stale peer') - }) - await localServer.discover('remotePeer', '127.0.0.2') - await wait(100) - await destroy(localServer, localService) - await destroy(remoteServer, remoteService) - t.pass('did not sync') - }) - - t.test('should sync with best peer', async (t) => { - const [remoteServer1, remoteService1] = await setup({ - location: '127.0.0.2', - height: 9, - syncmode: SyncMode.Full, - }) - const [remoteServer2, remoteService2] = await setup({ - location: '127.0.0.3', - height: 10, - syncmode: SyncMode.Full, - }) - const [localServer, localService] = await setup({ - location: '127.0.0.1', - height: 0, - syncmode: SyncMode.Light, - }) - await localService.synchronizer!.stop() - await localServer.discover('remotePeer1', '127.0.0.2') - await localServer.discover('remotePeer2', '127.0.0.3') - localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - if (localService.chain.headers.height === BigInt(10)) { - t.pass('synced with best peer') - await destroy(localServer, localService) - await destroy(remoteServer1, remoteService1) - await destroy(remoteServer2, remoteService2) - } - }) - await localService.synchronizer!.start() - }) + it( + 'should sync with best peer', + async () => { + const [remoteServer1, remoteService1] = await setup({ + location: '127.0.0.2', + height: 9, + syncmode: SyncMode.Full, + }) + const [remoteServer2, remoteService2] = await setup({ + location: '127.0.0.3', + height: 10, + syncmode: SyncMode.Full, + }) + const [localServer, localService] = await setup({ + location: '127.0.0.1', + height: 0, + syncmode: SyncMode.Light, + }) + await localService.synchronizer!.stop() + await 
localServer.discover('remotePeer1', '127.0.0.2') + await localServer.discover('remotePeer2', '127.0.0.3') + localService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { + if (localService.chain.headers.height === BigInt(10)) { + assert.ok(true, 'synced with best peer') + await destroy(localServer, localService) + await destroy(remoteServer1, remoteService1) + await destroy(remoteServer2, remoteService2) + } + }) + await localService.synchronizer!.start() + }, + { timeout: 30000 } + ) }) diff --git a/packages/client/test/integration/merge.spec.ts b/packages/client/test/integration/merge.spec.ts index b8bb1b57bd..a16d7d9b8f 100644 --- a/packages/client/test/integration/merge.spec.ts +++ b/packages/client/test/integration/merge.spec.ts @@ -8,7 +8,7 @@ import { Hardfork, } from '@ethereumjs/common' import { Address, hexToBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' @@ -20,7 +20,7 @@ import { destroy, setup } from './util' import type { CliqueConsensus } from '@ethereumjs/blockchain' -tape('[Integration:Merge]', async (t) => { +describe('[Integration:Merge]', async () => { const commonPoA = Common.custom( { consensus: { @@ -99,7 +99,7 @@ tape('[Integration:Merge]', async (t) => { return [server, service] } - t.test('should mine and stop at the merge (PoA)', async (t) => { + it('should mine and stop at the merge (PoA)', async () => { const [server, service] = await minerSetup(commonPoA) const [remoteServer, remoteService] = await setup({ location: '127.0.0.2', @@ -109,32 +109,34 @@ tape('[Integration:Merge]', async (t) => { ;(remoteService.chain.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [ accounts[0][0], ] // stub - ;(BlockHeader as any).prototype._consensusFormatValidation = () => {} //stub + BlockHeader.prototype['_consensusFormatValidation'] = () => {} //stub await 
server.discover('remotePeer1', '127.0.0.2') const targetTTD = BigInt(5) - remoteService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - const { td } = remoteService.chain.headers - if (td === targetTTD) { - t.equal( - remoteService.chain.headers.td, - targetTTD, - 'synced blocks to the merge successfully' - ) - // Make sure the miner has stopped - t.notOk(service.miner!.running, 'miner should not be running') - await destroy(server, service) - await destroy(remoteServer, remoteService) - t.end() - } - if (td > targetTTD) { - t.fail('chain should not exceed merge TTD') - } + + await new Promise((resolve) => { + remoteService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { + const { td } = remoteService.chain.headers + if (td === targetTTD) { + assert.equal( + remoteService.chain.headers.td, + targetTTD, + 'synced blocks to the merge successfully' + ) + // Make sure the miner has stopped + assert.notOk(service.miner!.running, 'miner should not be running') + await destroy(server, service) + await destroy(remoteServer, remoteService) + resolve(undefined) + } + if (td > targetTTD) { + assert.fail('chain should not exceed merge TTD') + } + }) + void remoteService.synchronizer!.start() }) - await remoteService.synchronizer!.start() - await new Promise(() => {}) // resolves once t.end() is called - }) + }, 60000) - t.test('should mine and stop at the merge (PoW)', async (t) => { + it('should mine and stop at the merge (PoW)', async () => { const [server, service] = await minerSetup(commonPoW) const [remoteServer, remoteService] = await setup({ location: '127.0.0.2', @@ -144,32 +146,33 @@ tape('[Integration:Merge]', async (t) => { await server.discover('remotePeer1', '127.0.0.2') const targetTTD = BigInt(1000) let terminalHeight: bigint | undefined - remoteService.config.events.on(Event.CHAIN_UPDATED, async () => { - const { height, td } = remoteService.chain.headers - if (td > targetTTD) { - if (terminalHeight === undefined || terminalHeight === 
BigInt(0)) { - terminalHeight = height + await new Promise((resolve) => { + remoteService.config.events.on(Event.CHAIN_UPDATED, async () => { + const { height, td } = remoteService.chain.headers + if (td > targetTTD) { + if (terminalHeight === undefined || terminalHeight === BigInt(0)) { + terminalHeight = height + } + assert.equal( + remoteService.chain.headers.height, + terminalHeight, + 'synced blocks to the merge successfully' + ) + // Make sure the miner has stopped + assert.notOk(service.miner!.running, 'miner should not be running') + await destroy(server, service) + await destroy(remoteServer, remoteService) + resolve(undefined) + } + if ( + typeof terminalHeight === 'bigint' && + terminalHeight !== BigInt(0) && + terminalHeight < height + ) { + assert.fail('chain should not exceed merge terminal block') } - t.equal( - remoteService.chain.headers.height, - terminalHeight, - 'synced blocks to the merge successfully' - ) - // Make sure the miner has stopped - t.notOk(service.miner!.running, 'miner should not be running') - await destroy(server, service) - await destroy(remoteServer, remoteService) - t.end() - } - if ( - typeof terminalHeight === 'bigint' && - terminalHeight !== BigInt(0) && - terminalHeight < height - ) { - t.fail('chain should not exceed merge terminal block') - } + }) + void remoteService.synchronizer!.start() }) - await remoteService.synchronizer!.start() - await new Promise(() => {}) // resolves once t.end() is called - }) + }, 120000) }) diff --git a/packages/client/test/integration/miner.spec.ts b/packages/client/test/integration/miner.spec.ts index ac37af2389..ca80cca2a0 100644 --- a/packages/client/test/integration/miner.spec.ts +++ b/packages/client/test/integration/miner.spec.ts @@ -7,7 +7,7 @@ import { Hardfork, } from '@ethereumjs/common' import { Address, hexToBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } 
from '../../src/config'
@@ -19,7 +19,7 @@ import { destroy, setup } from './util'
 
 import type { CliqueConsensus } from '@ethereumjs/blockchain'
 
-tape('[Integration:Miner]', async (t) => {
+describe('[Integration:Miner]', async () => {
   // Schedule london at 0 and also unset any past scheduled timestamp hardforks that might collide with test
   const hardforks = new Common({ chain: ChainCommon.Goerli })
     .hardforks()
@@ -78,10 +78,9 @@ tape('[Integration:Miner]', async (t) => {
     return [server, service]
   }
 
-  t.test(
+  it(
     'should mine blocks while a peer stays connected to tip of chain',
-    { timeout: 25000 },
-    async (t) => {
+    async () => {
       const [server, service] = await minerSetup()
       const [remoteServer, remoteService] = await setup({
         location: '127.0.0.2',
@@ -94,16 +93,23 @@ tape('[Integration:Miner]', async (t) => {
       ;(remoteService as FullEthereumService).execution.run = async () => 1 // stub
       await server.discover('remotePeer1', '127.0.0.2')
       const targetHeight = BigInt(5)
-      remoteService.config.events.on(Event.SYNC_SYNCHRONIZED, async (chainHeight) => {
-        if (chainHeight === targetHeight) {
-          t.equal(remoteService.chain.blocks.height, targetHeight, 'synced blocks successfully')
-          await destroy(server, service)
-          await destroy(remoteServer, remoteService)
-          t.end()
-        }
+      await new Promise((resolve) => {
+        remoteService.config.events.on(Event.SYNC_SYNCHRONIZED, async (chainHeight) => {
+          if (chainHeight === targetHeight) {
+            assert.equal(
+              remoteService.chain.blocks.height,
+              targetHeight,
+              'synced blocks successfully'
+            )
+            await destroy(server, service)
+            await destroy(remoteServer, remoteService)
+            resolve(undefined)
+          }
+        })
+
+        void remoteService.synchronizer!.start()
       })
-      await remoteService.synchronizer!.start()
-      await new Promise(() => {}) // resolves once t.end() is called
-    }
+    },
+    { timeout: 25000 }
   )
 })
diff --git a/packages/client/test/integration/mocks/mockpeer.ts b/packages/client/test/integration/mocks/mockpeer.ts
index 0bd5442440..18d3020133 100644 
--- a/packages/client/test/integration/mocks/mockpeer.ts +++ b/packages/client/test/integration/mocks/mockpeer.ts @@ -1,6 +1,6 @@ import { EventEmitter } from 'events' import { pipe } from 'it-pipe' -import * as pushable from 'it-pushable' +import pushable from 'it-pushable' import { Peer } from '../../../src/net/peer' import { Event } from '../../../src/types' diff --git a/packages/client/test/integration/peerpool.spec.ts b/packages/client/test/integration/peerpool.spec.ts index 9c3040a3d8..c2964a5b13 100644 --- a/packages/client/test/integration/peerpool.spec.ts +++ b/packages/client/test/integration/peerpool.spec.ts @@ -1,5 +1,5 @@ import { Blockchain } from '@ethereumjs/blockchain' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Config } from '../../src/config' import { PeerPool } from '../../src/net/peerpool' @@ -10,7 +10,7 @@ import { MockChain } from './mocks/mockchain' import { MockServer } from './mocks/mockserver' import { wait } from './util' -tape('[Integration:PeerPool]', async (t) => { +describe('[Integration:PeerPool]', async () => { async function setup(protocols: EthProtocol[] = []): Promise<[MockServer, PeerPool]> { const serverConfig = new Config({ accountCache: 10000, storageCache: 1000 }) const server = new MockServer({ config: serverConfig }) as any @@ -27,43 +27,43 @@ tape('[Integration:PeerPool]', async (t) => { await server.stop() } - t.test('should open', async (t) => { + it('should open', async () => { const [server, pool] = await setup() - t.ok((pool as any).opened, 'opened') + assert.ok((pool as any).opened, 'opened') await destroy(server, pool) }) - t.test('should add/remove peer', async (t) => { + it('should add/remove peer', async () => { const [server, pool] = await setup() pool.config.events.on(Event.POOL_PEER_ADDED, (peer: any) => - t.equal(peer.id, 'peer0', 'added peer') + assert.equal(peer.id, 'peer0', 'added peer') ) pool.config.events.on(Event.POOL_PEER_REMOVED, (peer: any) => - 
t.equal(peer.id, 'peer0', 'removed peer') + assert.equal(peer.id, 'peer0', 'removed peer') ) pool.add(await server.accept('peer0')) await wait(100) server.disconnect('peer0') await destroy(server, pool) - t.pass('destroyed') + assert.ok(true, 'destroyed') }) - t.test('should ban peer', async (t) => { + it('should ban peer', async () => { const [server, pool] = await setup() pool.config.events.on(Event.POOL_PEER_ADDED, (peer: any) => - t.equal(peer.id, 'peer0', 'added peer') + assert.equal(peer.id, 'peer0', 'added peer') ) pool.config.events.on(Event.POOL_PEER_BANNED, (peer: any) => - t.equal(peer.id, 'peer0', 'banned peer') + assert.equal(peer.id, 'peer0', 'banned peer') ) pool.add(await server.accept('peer0')) await wait(100) pool.ban(pool.peers[0]) await destroy(server, pool) - t.pass('destroyed') + assert.ok(true, 'destroyed') }) - t.test('should handle peer messages', async (t) => { + it('should handle peer messages', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const blockchain = await Blockchain.create({ validateBlocks: false, @@ -78,14 +78,16 @@ tape('[Integration:PeerPool]', async (t) => { }), ] const [server, pool] = await setup(protocols) - config.events.on(Event.POOL_PEER_ADDED, (peer: any) => t.equal(peer.id, 'peer0', 'added peer')) + config.events.on(Event.POOL_PEER_ADDED, (peer: any) => + assert.equal(peer.id, 'peer0', 'added peer') + ) config.events.on(Event.PROTOCOL_MESSAGE, (msg: any, proto: any, peer: any) => { - t.deepEqual([msg, proto, peer.id], ['msg0', 'proto0', 'peer0'], 'got message') + assert.deepEqual([msg, proto, peer.id], ['msg0', 'proto0', 'peer0'], 'got message') }) pool.add(await server.accept('peer0')) await wait(100) config.events.emit(Event.PROTOCOL_MESSAGE, 'msg0', 'proto0', pool.peers[0]) await destroy(server, pool) - t.pass('destroyed') + assert.ok(true, 'destroyed') }) }) diff --git a/packages/client/test/integration/pow.spec.ts 
b/packages/client/test/integration/pow.spec.ts index e43d62d649..df57b95a4c 100644 --- a/packages/client/test/integration/pow.spec.ts +++ b/packages/client/test/integration/pow.spec.ts @@ -1,7 +1,7 @@ import { Common, Hardfork } from '@ethereumjs/common' import { Address, hexToBytes, parseGethGenesisState } from '@ethereumjs/util' import { removeSync } from 'fs-extra' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Config } from '../../src' import { createInlineClient } from '../sim/simutils' @@ -73,13 +73,13 @@ async function setupPowDevnet(prefundAddress: Address, cleanStart: boolean) { return client } -const mineBlockAndstopClient = async (client: EthereumClient, t: tape.Test) => { +const mineBlockAndstopClient = async (client: EthereumClient) => { await new Promise((resolve) => { client.config.logger.on('data', (data) => { if (data.message.includes('Miner: Found PoW solution') === true && client.started) { - t.pass('found a PoW solution') + assert.ok(true, 'found a PoW solution') void client.stop().then(() => { - t.ok(!client.started, 'client stopped successfully') + assert.ok(!client.started, 'client stopped successfully') resolve(undefined) }) } @@ -87,9 +87,10 @@ const mineBlockAndstopClient = async (client: EthereumClient, t: tape.Test) => { }) } -tape('PoW client test', { timeout: 60000 }, async (t) => { - t.plan(3) - const client = await setupPowDevnet(minerAddress, true) - t.ok(client.started, 'client started successfully') - await mineBlockAndstopClient(client, t) +describe('PoW client test', async () => { + it('starts the client successfully', async () => { + const client = await setupPowDevnet(minerAddress, true) + assert.ok(client.started, 'client started successfully') + await mineBlockAndstopClient(client) + }, 60000) }) diff --git a/packages/client/test/logging.spec.ts b/packages/client/test/logging.spec.ts index b53e6cc3ef..fd75927bd0 100644 --- a/packages/client/test/logging.spec.ts +++ 
b/packages/client/test/logging.spec.ts @@ -1,54 +1,51 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { getLogger } from '../src/logging' -tape('[Logging]', (t) => { +describe('[Logging]', () => { const logger = getLogger({ logLevel: 'info', logFile: 'ethereumjs.log', logLevelFile: 'info' }) const format = logger.transports.find((t: any) => t.name === 'console')!.format! - t.test('should have correct transports', (st) => { - st.ok( + it('should have correct transports', () => { + assert.ok( logger.transports.find((t: any) => t.name === 'console') !== undefined, 'should have stdout transport' ) - st.ok( + assert.ok( logger.transports.find((t: any) => t.name === 'file') !== undefined, 'should have file transport' ) - st.end() }) - t.test('should log error stacks properly', (st) => { + it('should log error stacks properly', () => { try { throw new Error('an error') } catch (e: any) { e.level = 'error' - st.ok( + assert.ok( /an error\n {4}at/.test((format.transform(e) as any).message), 'log message should contain stack trace (1)' ) - st.ok( + assert.ok( /an error\n {4}at/.test((format.transform({ level: 'error', message: e }) as any).message), 'log message should contain stack trace (2)' ) - st.end() } }) - t.test('should colorize key=value pairs', (st) => { + it('should colorize key=value pairs', () => { if (process.env.GITHUB_ACTION !== undefined) { - st.skip('no color functionality in ci') - return st.end() + assert.ok(true, 'no color functionality in ci') + return } const { message } = format.transform({ level: 'info', message: 'test key=value', }) as any - st.equal( + assert.equal( message, 'test \x1B[38;2;0;128;0mkey\x1B[39m=value ', 'key=value pairs should be colorized' ) - st.end() }) }) diff --git a/packages/client/test/miner/miner.spec.ts b/packages/client/test/miner/miner.spec.ts index fd9b242733..c844a4f52f 100644 --- a/packages/client/test/miner/miner.spec.ts +++ b/packages/client/test/miner/miner.spec.ts @@ -5,8 +5,8 
@@ import { FeeMarketEIP1559Transaction, LegacyTransaction } from '@ethereumjs/tx' import { Address, equalsBytes, hexToBytes } from '@ethereumjs/util' import { AbstractLevel } from 'abstract-level' import { keccak256 } from 'ethereum-cryptography/keccak' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' @@ -35,9 +35,9 @@ const setBalance = async (vm: VM, address: Address, balance: bigint) => { await vm.stateManager.commit() } -tape('[Miner]', async (t) => { - const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation - ;(BlockHeader as any).prototype._consensusFormatValidation = td.func() +describe('[Miner]', async () => { + const originalValidate = BlockHeader.prototype['_consensusFormatValidation'] + BlockHeader.prototype['_consensusFormatValidation'] = td.func() td.replace('@ethereumjs/block', { BlockHeader }) // Stub out setStateRoot so txPool.validate checks will pass since correct state root @@ -174,7 +174,7 @@ tape('[Miner]', async (t) => { } const txA01 = createTx() // A -> B, nonce: 0, value: 1, normal gasPrice - const txA011g = createTx( + const txA011 = createTx( // A -> B, nonce: 0, value: 1, normal gasPrice, mainnet as chain A, B, @@ -189,41 +189,38 @@ tape('[Miner]', async (t) => { const txA03 = createTx(A, B, 2, 1, 3000000000) // A -> B, nonce: 2, value: 1, 3x gasPrice const txB01 = createTx(B, A, 0, 1, 2500000000) // B -> A, nonce: 0, value: 1, 2.5x gasPrice - t.test('should initialize correctly', (t) => { + it('should initialize correctly', () => { const chain = new FakeChain() as any const service = new FullEthereumService({ config: customConfig, chain, }) const miner = new Miner({ config: customConfig, service }) - t.notOk(miner.running) - t.end() + assert.notOk(miner.running) }) - t.test('should start/stop', async (t) => { - t.plan(4) + it('should start/stop', async () => { 
const chain = new FakeChain() as any const service = new FullEthereumService({ config: customConfig, chain, }) let miner = new Miner({ config: customConfig, service }) - t.notOk(miner.running) + assert.notOk(miner.running) miner.start() - t.ok(miner.running) + assert.ok(miner.running) await wait(10) miner.stop() - t.notOk(miner.running) + assert.notOk(miner.running) // Should not start when config.mine=false const configMineFalse = new Config({ transports: [], accounts, mine: false }) miner = new Miner({ config: configMineFalse, service }) miner.start() - t.notOk(miner.running, 'miner should not start when config.mine=false') + assert.notOk(miner.running, 'miner should not start when config.mine=false') }) - t.test('assembleBlocks() -> with a single tx', async (t) => { - t.plan(1) + it('assembleBlocks() -> with a single tx', async () => { const chain = new FakeChain() as any const service = new FullEthereumService({ config: customConfig, @@ -245,7 +242,7 @@ tape('[Miner]', async (t) => { ;(vm.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [A.address] // stub chain.putBlocks = (blocks: Block[]) => { - t.equal(blocks[0].transactions.length, 1, 'new block should include tx') + assert.equal(blocks[0].transactions.length, 1, 'new block should include tx') miner.stop() txPool.stop() } @@ -253,8 +250,7 @@ tape('[Miner]', async (t) => { await wait(500) }) - t.test('assembleBlocks() -> with a hardfork mismatching tx', async (t) => { - t.plan(3) + it('assembleBlocks() -> with a hardfork mismatching tx', async () => { const chain = new FakeChain() as any const service = new FullEthereumService({ config: goerliConfig, @@ -272,20 +268,20 @@ tape('[Miner]', async (t) => { await setBalance(vm, A.address, BigInt('200000000000001')) // add tx - txA011g.common.setHardfork(Hardfork.Paris) - await txPool.add(txA011g) - t.equal(txPool.txsInPool, 1, 'transaction should be in pool') + txA011.common.setHardfork(Hardfork.Paris) + await txPool.add(txA011) + 
assert.equal(txPool.txsInPool, 1, 'transaction should be in pool') // disable consensus to skip PoA block signer validation ;(vm.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [A.address] // stub chain.putBlocks = (blocks: Block[]) => { - t.equal( + assert.equal( blocks[0].transactions.length, 0, 'new block should not include tx due to hardfork mismatch' ) - t.equal(txPool.txsInPool, 1, 'transaction should remain in pool') + assert.equal(txPool.txsInPool, 1, 'transaction should remain in pool') miner.stop() txPool.stop() } @@ -293,52 +289,48 @@ tape('[Miner]', async (t) => { await wait(500) }) - t.test( - 'assembleBlocks() -> with multiple txs, properly ordered by gasPrice and nonce', - async (t) => { - t.plan(4) - const chain = new FakeChain() as any - const _config = { - ...customConfig, - } - const service = new FullEthereumService({ - config: customConfig, - chain, - }) - const miner = new Miner({ config: customConfig, service, skipHardForkValidation: true }) - const { txPool } = service - const { vm } = service.execution - txPool.start() - miner.start() + it('assembleBlocks() -> with multiple txs, properly ordered by gasPrice and nonce', async () => { + const chain = new FakeChain() as any + const _config = { + ...customConfig, + } + const service = new FullEthereumService({ + config: customConfig, + chain, + }) + const miner = new Miner({ config: customConfig, service, skipHardForkValidation: true }) + const { txPool } = service + const { vm } = service.execution + txPool.start() + miner.start() - await setBalance(vm, A.address, BigInt('400000000000001')) - await setBalance(vm, B.address, BigInt('400000000000001')) - - // add txs - await txPool.add(txA01) - await txPool.add(txA02) - await txPool.add(txA03) - await txPool.add(txB01) - - // disable consensus to skip PoA block signer validation - ;(vm.blockchain as any)._validateConsensus = false - - chain.putBlocks = (blocks: Block[]) => { - const msg = 'txs in block should be properly 
ordered by gasPrice and nonce' - const expectedOrder = [txB01, txA01, txA02, txA03] - for (const [index, tx] of expectedOrder.entries()) { - const txHash = blocks[0].transactions[index]?.hash() - t.ok(txHash !== undefined && equalsBytes(txHash, tx.hash()), msg) - } - miner.stop() - txPool.stop() + await setBalance(vm, A.address, BigInt('400000000000001')) + await setBalance(vm, B.address, BigInt('400000000000001')) + + // add txs + await txPool.add(txA01) + await txPool.add(txA02) + await txPool.add(txA03) + await txPool.add(txB01) + + // disable consensus to skip PoA block signer validation + ;(vm.blockchain as any)._validateConsensus = false + + chain.putBlocks = (blocks: Block[]) => { + const msg = 'txs in block should be properly ordered by gasPrice and nonce' + const expectedOrder = [txB01, txA01, txA02, txA03] + for (const [index, tx] of expectedOrder.entries()) { + const txHash = blocks[0].transactions[index]?.hash() + assert.ok(txHash !== undefined && equalsBytes(txHash, tx.hash()), msg) } - await (miner as any).queueNextAssembly(0) - await wait(500) + miner.stop() + txPool.stop() } - ) - t.test('assembleBlocks() -> with saveReceipts', async (t) => { - t.plan(9) + await (miner as any).queueNextAssembly(0) + await wait(500) + }) + + it('assembleBlocks() -> with saveReceipts', async () => { const chain = new FakeChain() as any const config = new Config({ transports: [], @@ -362,7 +354,7 @@ tape('[Miner]', async (t) => { txPool.start() miner.start() - t.ok(receiptsManager, 'receiptsManager should be initialized') + assert.ok(receiptsManager, 'receiptsManager should be initialized') await setBalance(vm, A.address, BigInt('400000000000001')) await setBalance(vm, B.address, BigInt('400000000000001')) @@ -381,25 +373,24 @@ tape('[Miner]', async (t) => { const expectedOrder = [txB01, txA01, txA02, txA03] for (const [index, tx] of expectedOrder.entries()) { const txHash = blocks[0].transactions[index]?.hash() - t.ok(txHash !== undefined && equalsBytes(txHash, 
tx.hash()), msg) + assert.ok(txHash !== undefined && equalsBytes(txHash, tx.hash()), msg) } miner.stop() txPool.stop() } await (miner as any).queueNextAssembly(0) let receipt = await receiptsManager!.getReceipts(txB01.hash()) - t.ok(receipt, 'receipt should be saved') + assert.ok(receipt, 'receipt should be saved') receipt = await receiptsManager!.getReceipts(txA01.hash()) - t.ok(receipt, 'receipt should be saved') + assert.ok(receipt, 'receipt should be saved') receipt = await receiptsManager!.getReceipts(txA02.hash()) - t.ok(receipt, 'receipt should be saved') + assert.ok(receipt, 'receipt should be saved') receipt = await receiptsManager!.getReceipts(txA03.hash()) - t.ok(receipt, 'receipt should be saved') + assert.ok(receipt, 'receipt should be saved') await wait(500) }) - t.test('assembleBlocks() -> should not include tx under the baseFee', async (t) => { - t.plan(1) + it('assembleBlocks() -> should not include tx under the baseFee', async () => { const customChainParams = { hardforks: [{ name: 'london', block: 0 }] } const common = Common.custom(customChainParams, { baseChain: CommonChain.Goerli, @@ -444,13 +435,13 @@ tape('[Miner]', async (t) => { try { await txPool.add(tx, true) } catch { - t.fail('txPool should throw trying to add a tx with an invalid maxFeePerGas') + assert.fail('txPool should throw trying to add a tx with an invalid maxFeePerGas') } // disable consensus to skip PoA block signer validation ;(vm.blockchain as any)._validateConsensus = false ;(service.synchronizer as FullSynchronizer).handleNewBlock = async (block: Block) => { - t.equal(block.transactions.length, 0, 'should not include tx') + assert.equal(block.transactions.length, 0, 'should not include tx') miner.stop() txPool.stop() } @@ -459,8 +450,7 @@ tape('[Miner]', async (t) => { await wait(500) }) - t.test("assembleBlocks() -> should stop assembling a block after it's full", async (t) => { - t.plan(1) + it("assembleBlocks() -> should stop assembling a block after it's full", async 
() => { const chain = new FakeChain() as any const gasLimit = 100000 const block = Block.fromBlockData({ header: { gasLimit } }, { common: customCommon }) @@ -503,7 +493,7 @@ tape('[Miner]', async (t) => { ;(vm.blockchain as any)._validateConsensus = false chain.putBlocks = (blocks: Block[]) => { - t.equal(blocks[0].transactions.length, 1, 'only one tx should be included') + assert.equal(blocks[0].transactions.length, 1, 'only one tx should be included') miner.stop() txPool.stop() } @@ -511,214 +501,227 @@ tape('[Miner]', async (t) => { await wait(500) }) - t.test('assembleBlocks() -> should stop assembling when a new block is received', async (t) => { - t.plan(2) - const chain = new FakeChain() as any - const config = new Config({ - transports: [], - accountCache: 10000, - storageCache: 1000, - accounts, - mine: true, - common: customCommon, - }) - chain.putBlocks = () => { - t.fail('should have stopped assembling when a new block was received') - } - const service = new FullEthereumService({ - config, - chain, - }) - const miner = new Miner({ config, service, skipHardForkValidation: true }) + it( + 'assembleBlocks() -> should stop assembling when a new block is received', + async () => { + const chain = new FakeChain() as any + const config = new Config({ + transports: [], + accountCache: 10000, + storageCache: 1000, + accounts, + mine: true, + common: customCommon, + }) + const service = new FullEthereumService({ + config, + chain, + }) + const miner = new Miner({ config, service, skipHardForkValidation: true }) - // stub chainUpdated so assemble isn't called again - // when emitting Event.CHAIN_UPDATED in this test - ;(miner as any).chainUpdated = async () => {} + // stub chainUpdated so assemble isn't called again + // when emitting Event.CHAIN_UPDATED in this test + ;(miner as any).chainUpdated = async () => {} - const { txPool } = service - const { vm } = service.execution - txPool.start() - miner.start() - - await setBalance(vm, A.address, 
BigInt('200000000000001')) + const { txPool } = service + const { vm } = service.execution + txPool.start() + miner.start() - // add many txs to slow assembling - let privateKey = keccak256(new Uint8Array(0)) - for (let i = 0; i < 1000; i++) { - // In order not to pollute TxPool with too many txs from the same address - // (or txs which are already known), keep generating a new address for each tx - const address = Address.fromPrivateKey(privateKey) - await setBalance(vm, address, BigInt('200000000000001')) - const tx = createTx({ address, privateKey }) - await txPool.add(tx) - privateKey = keccak256(privateKey) - } + await setBalance(vm, A.address, BigInt('200000000000001')) + + // add many txs to slow assembling + let privateKey = keccak256(new Uint8Array(0)) + for (let i = 0; i < 1000; i++) { + // In order not to pollute TxPool with too many txs from the same address + // (or txs which are already known), keep generating a new address for each tx + const address = Address.fromPrivateKey(privateKey) + await setBalance(vm, address, BigInt('200000000000001')) + const tx = createTx({ address, privateKey }) + await txPool.add(tx) + privateKey = keccak256(privateKey) + } - await (miner as any).queueNextAssembly(5) - await wait(5) - t.ok((miner as any).assembling, 'miner should be assembling') - config.events.emit(Event.CHAIN_UPDATED) - await wait(25) - t.notOk((miner as any).assembling, 'miner should have stopped assembling') - miner.stop() - txPool.stop() - }) + chain.putBlocks = () => { + assert.fail('should have stopped assembling when a new block was received') + } + await (miner as any).queueNextAssembly(5) + await wait(5) + assert.ok((miner as any).assembling, 'miner should be assembling') + config.events.emit(Event.CHAIN_UPDATED) + await wait(25) + assert.notOk((miner as any).assembling, 'miner should have stopped assembling') + miner.stop() + txPool.stop() + }, + { timeout: 120000 } + ) - t.test('should handle mining over the london hardfork block', async (t) 
=> { - const customChainParams = { - hardforks: [ - { name: 'chainstart', block: 0 }, - { name: 'berlin', block: 2 }, - { name: 'london', block: 3 }, - ], - } - const common = Common.custom(customChainParams, { baseChain: CommonChain.Goerli }) - common.setHardforkBy({ blockNumber: 0 }) - const config = new Config({ - transports: [], - accountCache: 10000, - storageCache: 1000, - accounts, - mine: true, - common, - }) - const chain = await Chain.create({ config }) - await chain.open() - const service = new FullEthereumService({ - config, - chain, - }) - const miner = new Miner({ config, service, skipHardForkValidation: true }) + it( + 'should handle mining over the london hardfork block', + async () => { + const customChainParams = { + hardforks: [ + { name: 'chainstart', block: 0 }, + { name: 'berlin', block: 2 }, + { name: 'london', block: 3 }, + ], + } + const common = Common.custom(customChainParams, { baseChain: CommonChain.Goerli }) + common.setHardforkBy({ blockNumber: 0 }) + const config = new Config({ + transports: [], + accountCache: 10000, + storageCache: 1000, + accounts, + mine: true, + common, + }) + const chain = await Chain.create({ config }) + await chain.open() + const service = new FullEthereumService({ + config, + chain, + }) + const miner = new Miner({ config, service, skipHardForkValidation: true }) - const { vm } = service.execution - ;(vm.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [A.address] // stub - vm.blockchain.validateHeader = td.func() // stub - ;(miner as any).chainUpdated = async () => {} // stub - miner.start() - await wait(100) + const { vm } = service.execution + ;(vm.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [A.address] // stub + vm.blockchain.validateHeader = td.func() // stub + ;(miner as any).chainUpdated = async () => {} // stub + miner.start() + await wait(100) - // in this test we need to explicitly update common with - // setHardforkBy() to test the hardfork() value - // 
since the vmexecution run method isn't reached in this - // stubbed configuration. + // in this test we need to explicitly update common with + // setHardforkBy() to test the hardfork() value + // since the vmexecution run method isn't reached in this + // stubbed configuration. - // block 1: chainstart - await (miner as any).queueNextAssembly(0) - await wait(100) - config.execCommon.setHardforkBy({ blockNumber: 1 }) - t.equal(config.execCommon.hardfork(), Hardfork.Chainstart) + // block 1: chainstart + await (miner as any).queueNextAssembly(0) + await wait(100) + config.execCommon.setHardforkBy({ blockNumber: 1 }) + assert.equal(config.execCommon.hardfork(), Hardfork.Chainstart) - // block 2: berlin - await (miner as any).queueNextAssembly(0) - await wait(100) - config.execCommon.setHardforkBy({ blockNumber: 2 }) - t.equal(config.execCommon.hardfork(), Hardfork.Berlin) - const blockHeader2 = await chain.getCanonicalHeadHeader() + // block 2: berlin + await (miner as any).queueNextAssembly(0) + await wait(100) + config.execCommon.setHardforkBy({ blockNumber: 2 }) + assert.equal(config.execCommon.hardfork(), Hardfork.Berlin) + const blockHeader2 = await chain.getCanonicalHeadHeader() - // block 3: london - await (miner as any).queueNextAssembly(0) - await wait(100) - const blockHeader3 = await chain.getCanonicalHeadHeader() - config.execCommon.setHardforkBy({ blockNumber: 3 }) - t.equal(config.execCommon.hardfork(), Hardfork.London) - t.equal( - blockHeader2.gasLimit * BigInt(2), - blockHeader3.gasLimit, - 'gas limit should be double previous block' - ) - const initialBaseFee = config.execCommon.paramByEIP('gasConfig', 'initialBaseFee', 1559)! 
- t.equal(blockHeader3.baseFeePerGas!, initialBaseFee, 'baseFee should be initial value') - - // block 4 - await (miner as any).queueNextAssembly(0) - await wait(100) - const blockHeader4 = await chain.getCanonicalHeadHeader() - config.execCommon.setHardforkBy({ blockNumber: 4 }) - t.equal(config.execCommon.hardfork(), Hardfork.London) - t.equal( - blockHeader4.baseFeePerGas!, - blockHeader3.calcNextBaseFee(), - 'baseFee should be as calculated' - ) - t.ok((await chain.getCanonicalHeadHeader()).number === BigInt(4)) - miner.stop() - await chain.close() - }) + // block 3: london + await (miner as any).queueNextAssembly(0) + await wait(100) + const blockHeader3 = await chain.getCanonicalHeadHeader() + config.execCommon.setHardforkBy({ blockNumber: 3 }) + assert.equal(config.execCommon.hardfork(), Hardfork.London) + assert.equal( + blockHeader2.gasLimit * BigInt(2), + blockHeader3.gasLimit, + 'gas limit should be double previous block' + ) + const initialBaseFee = config.execCommon.paramByEIP('gasConfig', 'initialBaseFee', 1559)! 
+ assert.equal(blockHeader3.baseFeePerGas!, initialBaseFee, 'baseFee should be initial value') - t.test('should handle mining ethash PoW', async (t) => { - const addr = A.address.toString().slice(2) - const consensusConfig = { ethash: true } - const defaultChainData = { - config: { - chainId: 123456, - homesteadBlock: 0, - eip150Block: 0, - eip150Hash: '0x0000000000000000000000000000000000000000000000000000000000000000', - eip155Block: 0, - eip158Block: 0, - byzantiumBlock: 0, - constantinopleBlock: 0, - petersburgBlock: 0, - istanbulBlock: 0, - berlinBlock: 0, - londonBlock: 0, - ...consensusConfig, - }, - nonce: '0x0', - timestamp: '0x614b3731', - gasLimit: '0x47b760', - difficulty: '0x1', - mixHash: '0x0000000000000000000000000000000000000000000000000000000000000000', - coinbase: '0x0000000000000000000000000000000000000000', - number: '0x0', - gasUsed: '0x0', - parentHash: '0x0000000000000000000000000000000000000000000000000000000000000000', - baseFeePerGas: 7, - } - const extraData = '0x' + '0'.repeat(32) - const chainData = { - ...defaultChainData, - extraData, - alloc: { [addr]: { balance: '0x10000000000000000000' } }, - } - const common = Common.fromGethGenesis(chainData, { chain: 'devnet', hardfork: Hardfork.London }) - ;(common as any)._chainParams['genesis'].difficulty = 1 - const config = new Config({ - transports: [], - accountCache: 10000, - storageCache: 1000, - accounts, - mine: true, - common, - }) - const chain = await Chain.create({ config }) - await chain.open() - const service = new FullEthereumService({ - config, - chain, - }) - const miner = new Miner({ config, service, skipHardForkValidation: true }) - ;(chain.blockchain as any)._validateConsensus = false - ;(miner as any).chainUpdated = async () => {} // stub - miner.start() - await wait(1000) - config.events.on(Event.CHAIN_UPDATED, async () => { - t.equal(chain.blocks.latest!.header.number, BigInt(1)) + // block 4 + await (miner as any).queueNextAssembly(0) + await wait(100) + const 
blockHeader4 = await chain.getCanonicalHeadHeader() + config.execCommon.setHardforkBy({ blockNumber: 4 }) + assert.equal(config.execCommon.hardfork(), Hardfork.London) + assert.equal( + blockHeader4.baseFeePerGas!, + blockHeader3.calcNextBaseFee(), + 'baseFee should be as calculated' + ) + assert.ok((await chain.getCanonicalHeadHeader()).number === BigInt(4)) miner.stop() await chain.close() - t.end() - }) - await (miner as any).queueNextAssembly(0) - await wait(10000) - }) + }, + { timeout: 10000 } + ) + + it( + 'should handle mining ethash PoW', + async () => { + const addr = A.address.toString().slice(2) + const consensusConfig = { ethash: true } + const defaultChainData = { + config: { + chainId: 123456, + homesteadBlock: 0, + eip150Block: 0, + eip150Hash: '0x0000000000000000000000000000000000000000000000000000000000000000', + eip155Block: 0, + eip158Block: 0, + byzantiumBlock: 0, + constantinopleBlock: 0, + petersburgBlock: 0, + istanbulBlock: 0, + berlinBlock: 0, + londonBlock: 0, + ...consensusConfig, + }, + nonce: '0x0', + timestamp: '0x614b3731', + gasLimit: '0x47b760', + difficulty: '0x1', + mixHash: '0x0000000000000000000000000000000000000000000000000000000000000000', + coinbase: '0x0000000000000000000000000000000000000000', + number: '0x0', + gasUsed: '0x0', + parentHash: '0x0000000000000000000000000000000000000000000000000000000000000000', + baseFeePerGas: 7, + } + const extraData = '0x' + '0'.repeat(32) + const chainData = { + ...defaultChainData, + extraData, + alloc: { [addr]: { balance: '0x10000000000000000000' } }, + } + const common = Common.fromGethGenesis(chainData, { + chain: 'devnet', + hardfork: Hardfork.London, + }) + ;(common as any)._chainParams['genesis'].difficulty = 1 + ;(common as any)._chainParams['genesis'].difficulty = 1 + const config = new Config({ + transports: [], + accountCache: 10000, + storageCache: 1000, + accounts, + mine: true, + common, + }) + const chain = await Chain.create({ config }) + await chain.open() + const 
service = new FullEthereumService({ + config, + chain, + }) + const miner = new Miner({ config, service, skipHardForkValidation: true }) + ;(chain.blockchain as any)._validateConsensus = false + ;(miner as any).chainUpdated = async () => {} // stub + miner.start() + await wait(1000) + config.events.on(Event.CHAIN_UPDATED, async () => { + assert.equal(chain.blocks.latest!.header.number, BigInt(1)) + miner.stop() + await chain.close() + }) + await (miner as any).queueNextAssembly(0) + await wait(10000) + }, + { timeout: 120000 } + ) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() // according to https://github.com/testdouble/testdouble.js/issues/379#issuecomment-415868424 // mocking indirect dependencies is not properly supported, but it works for us in this file, // so we will replace the original functions to avoid issues in other tests that come after ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate DefaultStateManager.prototype.setStateRoot = ogStateManagerSetStateRoot - t.end() }) }) diff --git a/packages/client/test/miner/pendingBlock.spec.ts b/packages/client/test/miner/pendingBlock.spec.ts index 4c8de9b603..6f47980e4f 100644 --- a/packages/client/test/miner/pendingBlock.spec.ts +++ b/packages/client/test/miner/pendingBlock.spec.ts @@ -21,10 +21,10 @@ import { } from '@ethereumjs/util' import { VM } from '@ethereumjs/vm' import * as kzg from 'c-kzg' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' -import * as gethGenesis from '../../../block/test/testdata/4844-hardfork.json' +import gethGenesis from '../../../block/test/testdata/4844-hardfork.json' import { Config } from '../../src/config' import { getLogger } from '../../src/logging' import { PendingBlock } from '../../src/miner' @@ -89,9 +89,9 @@ const setup = () => { return { txPool } } -tape('[PendingBlock]', async (t) => { - const originalValidate = (BlockHeader as 
any).prototype._consensusFormatValidation - ;(BlockHeader as any).prototype._consensusFormatValidation = td.func() +describe('[PendingBlock]', async () => { + const originalValidate = BlockHeader.prototype['_consensusFormatValidation'] + BlockHeader.prototype['_consensusFormatValidation'] = td.func() td.replace('@ethereumjs/block', { BlockHeader }) const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot @@ -123,7 +123,7 @@ tape('[PendingBlock]', async (t) => { const txB01 = createTx(B, A, 0, 1, 2500000000) // B -> A, nonce: 0, value: 1, 2.5x gasPrice const txB011 = createTx(B, A, 0, 1, 2500000000) // B -> A, nonce: 0, value: 1, 2.5x gasPrice - t.test('should start and build', async (t) => { + it('should start and build', async () => { const { txPool } = setup() const vm = await VM.create({ common }) await setBalance(vm, A.address, BigInt(5000000000000000)) @@ -134,20 +134,23 @@ tape('[PendingBlock]', async (t) => { const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) const parentBlock = await vm.blockchain.getCanonicalHeadBlock!() const payloadId = await pendingBlock.start(vm, parentBlock) - t.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') + assert.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') await txPool.add(txB01) const built = await pendingBlock.build(payloadId) - if (!built) return t.fail('pendingBlock did not return') + if (!built) return assert.fail('pendingBlock did not return') const [block, receipts] = built - t.equal(block?.header.number, BigInt(1), 'should have built block number 1') - t.equal(block?.transactions.length, 3, 'should include txs from pool') - t.equal(receipts.length, 3, 'receipts should match number of transactions') + assert.equal(block?.header.number, BigInt(1), 'should have built block number 1') + assert.equal(block?.transactions.length, 3, 'should include txs from pool') + assert.equal(receipts.length, 3, 'receipts should 
match number of transactions') pendingBlock.pruneSetToMax(0) - t.equal(pendingBlock.pendingPayloads.size, 0, 'should reset the pending payload after build') - t.end() + assert.equal( + pendingBlock.pendingPayloads.size, + 0, + 'should reset the pending payload after build' + ) }) - t.test('should include txs with mismatching hardforks that can still be executed', async (t) => { + it('should include txs with mismatching hardforks that can still be executed', async () => { const { txPool } = setup() const vm = await VM.create({ common }) await setBalance(vm, A.address, BigInt(5000000000000000)) @@ -155,14 +158,14 @@ tape('[PendingBlock]', async (t) => { txA011.common.setHardfork(Hardfork.Paris) await txPool.add(txA011) - t.equal(txPool.txsInPool, 1, '1 txA011 should be added') + assert.equal(txPool.txsInPool, 1, '1 txA011 should be added') // skip hardfork validation for ease const pendingBlock = new PendingBlock({ config, txPool }) const parentBlock = await vm.blockchain.getCanonicalHeadBlock!() const payloadId = await pendingBlock.start(vm, parentBlock) - t.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') + assert.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') const payload = pendingBlock.pendingPayloads.get(bytesToHex(payloadId)) - t.equal( + assert.equal( (payload as any).transactions.filter( (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txA011.hash()) ).length, @@ -172,13 +175,13 @@ tape('[PendingBlock]', async (t) => { txB011.common.setHardfork(Hardfork.Paris) await txPool.add(txB011) - t.equal(txPool.txsInPool, 2, '1 txB011 should be added') + assert.equal(txPool.txsInPool, 2, '1 txB011 should be added') const built = await pendingBlock.build(payloadId) - if (!built) return t.fail('pendingBlock did not return') + if (!built) return assert.fail('pendingBlock did not return') const [block] = built - t.equal(block?.header.number, BigInt(1), 'should have built block number 1') - 
t.equal(block?.transactions.length, 2, 'should include txs from pool') - t.equal( + assert.equal(block?.header.number, BigInt(1), 'should have built block number 1') + assert.equal(block?.transactions.length, 2, 'should include txs from pool') + assert.equal( (payload as any).transactions.filter( (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txB011.hash()) ).length, @@ -186,11 +189,14 @@ tape('[PendingBlock]', async (t) => { 'txB011 should be in block' ) pendingBlock.pruneSetToMax(0) - t.equal(pendingBlock.pendingPayloads.size, 0, 'should reset the pending payload after build') - t.end() + assert.equal( + pendingBlock.pendingPayloads.size, + 0, + 'should reset the pending payload after build' + ) }) - t.test('should start and stop', async (t) => { + it('should start and stop', async () => { const { txPool } = setup() await txPool.add(txA01) const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) @@ -198,13 +204,16 @@ tape('[PendingBlock]', async (t) => { await setBalance(vm, A.address, BigInt(5000000000000000)) const parentBlock = await vm.blockchain.getCanonicalHeadBlock!() const payloadId = await pendingBlock.start(vm, parentBlock) - t.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') + assert.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') pendingBlock.stop(payloadId) - t.equal(pendingBlock.pendingPayloads.size, 0, 'should reset the pending payload after stopping') - t.end() + assert.equal( + pendingBlock.pendingPayloads.size, + 0, + 'should reset the pending payload after stopping' + ) }) - t.test('should stop adding txs when block is full', async (t) => { + it('should stop adding txs when block is full', async () => { const { txPool } = setup() // set gas limit low so that can accomodate 2 txs @@ -236,24 +245,31 @@ tape('[PendingBlock]', async (t) => { await setBalance(vm, A.address, BigInt(5000000000000000)) const parentBlock = await 
vm.blockchain.getCanonicalHeadBlock!() const payloadId = await pendingBlock.start(vm, parentBlock) - t.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') + assert.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') // Add a tx to const built = await pendingBlock.build(payloadId) - if (!built) return t.fail('pendingBlock did not return') + if (!built) return assert.fail('pendingBlock did not return') const [block, receipts] = built - t.equal(block?.header.number, BigInt(1), 'should have built block number 1') - t.equal(block?.transactions.length, 2, 'should include txs from pool that fit in the block') - t.equal(receipts.length, 2, 'receipts should match number of transactions') + assert.equal(block?.header.number, BigInt(1), 'should have built block number 1') + assert.equal( + block?.transactions.length, + 2, + 'should include txs from pool that fit in the block' + ) + assert.equal(receipts.length, 2, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) - t.equal(pendingBlock.pendingPayloads.size, 0, 'should reset the pending payload after build') + assert.equal( + pendingBlock.pendingPayloads.size, + 0, + 'should reset the pending payload after build' + ) // reset gas Limit common['_chainParams'].genesis.gasLimit = prevGasLimit - t.end() }) - t.test('should skip adding txs when tx too big to fit', async (t) => { + it('should skip adding txs when tx too big to fit', async () => { const { txPool } = setup() const vm = await VM.create({ common }) await setBalance(vm, A.address, BigInt(5000000000000000)) @@ -273,57 +289,67 @@ tape('[PendingBlock]', async (t) => { await setBalance(vm, A.address, BigInt(5000000000000000)) const parentBlock = await vm.blockchain.getCanonicalHeadBlock!() const payloadId = await pendingBlock.start(vm, parentBlock) - t.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') + assert.equal(pendingBlock.pendingPayloads.size, 1, 'should set the 
pending payload') const built = await pendingBlock.build(payloadId) - if (!built) return t.fail('pendingBlock did not return') + if (!built) return assert.fail('pendingBlock did not return') const [block, receipts] = built - t.equal(block?.header.number, BigInt(1), 'should have built block number 1') - t.equal(block?.transactions.length, 2, 'should include txs from pool that fit in the block') - t.equal(receipts.length, 2, 'receipts should match number of transactions') + assert.equal(block?.header.number, BigInt(1), 'should have built block number 1') + assert.equal( + block?.transactions.length, + 2, + 'should include txs from pool that fit in the block' + ) + assert.equal(receipts.length, 2, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) - t.equal(pendingBlock.pendingPayloads.size, 0, 'should reset the pending payload after build') - t.end() + assert.equal( + pendingBlock.pendingPayloads.size, + 0, + 'should reset the pending payload after build' + ) }) - t.test('should not add tx that errors (sender with insufficient funds)', async (t) => { + it('should not add tx that errors (sender with insufficient funds)', async () => { const { txPool } = setup() await txPool.add(txA01) const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) const vm = await VM.create({ common }) const parentBlock = await vm.blockchain.getCanonicalHeadBlock!() const payloadId = await pendingBlock.start(vm, parentBlock) - t.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') + assert.equal(pendingBlock.pendingPayloads.size, 1, 'should set the pending payload') const built = await pendingBlock.build(payloadId) - if (!built) return t.fail('pendingBlock did not return') + if (!built) return assert.fail('pendingBlock did not return') const [block, receipts] = built - t.equal(block?.header.number, BigInt(1), 'should have built block number 1') - t.equal( + assert.equal(block?.header.number, BigInt(1), 'should 
have built block number 1') + assert.equal( block.transactions.length, 0, 'should not include tx with sender that has insufficient funds' ) - t.equal(receipts.length, 0, 'receipts should match number of transactions') + assert.equal(receipts.length, 0, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) - t.equal(pendingBlock.pendingPayloads.size, 0, 'should reset the pending payload after build') - t.end() + assert.equal( + pendingBlock.pendingPayloads.size, + 0, + 'should reset the pending payload after build' + ) }) - t.test('should throw when blockchain does not have getTotalDifficulty function', async (st) => { + it('should throw when blockchain does not have getTotalDifficulty function', async () => { const { txPool } = setup() const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) const vm = (txPool as any).vm try { await pendingBlock.start(vm, new Block()) - st.fail('should have thrown') + assert.fail('should have thrown') } catch (err: any) { - st.equal( + assert.equal( err.message, 'cannot get iterator head: blockchain has no getTotalDifficulty function' ) } }) - t.test('construct blob bundles', async (st) => { + it('construct blob bundles', async () => { try { initKZG(kzg, __dirname + '/../../src/trustedSetups/devnet6.txt') // eslint-disable-next-line @@ -372,7 +398,7 @@ tape('[PendingBlock]', async (t) => { ).sign(A.privateKey) await txPool.add(txNorm) - st.equal(txPool.txsInPool, 4, '4 txs should still be in the pool') + assert.equal(txPool.txsInPool, 4, '4 txs should still be in the pool') const pendingBlock = new PendingBlock({ config, txPool }) const vm = await VM.create({ common }) @@ -385,20 +411,19 @@ tape('[PendingBlock]', async (t) => { const payloadId = await pendingBlock.start(vm, parentBlock) const [block, _receipts, _value, blobsBundles] = (await pendingBlock.build(payloadId)) ?? 
[] - st.ok(block !== undefined && blobsBundles !== undefined) - st.equal(block!.transactions.length, 2, 'Only two blob txs should be included') - st.equal(blobsBundles!.blobs.length, 6, 'maximum 6 blobs should be included') - st.equal(blobsBundles!.commitments.length, 6, 'maximum 6 commitments should be included') - st.equal(blobsBundles!.proofs.length, 6, 'maximum 6 proofs should be included') + assert.ok(block !== undefined && blobsBundles !== undefined) + assert.equal(block!.transactions.length, 2, 'Only two blob txs should be included') + assert.equal(blobsBundles!.blobs.length, 6, 'maximum 6 blobs should be included') + assert.equal(blobsBundles!.commitments.length, 6, 'maximum 6 commitments should be included') + assert.equal(blobsBundles!.proofs.length, 6, 'maximum 6 proofs should be included') const pendingBlob = blobsBundles!.blobs[0] - st.ok(pendingBlob !== undefined && equalsBytes(pendingBlob, blobs[0])) + assert.ok(pendingBlob !== undefined && equalsBytes(pendingBlob, blobs[0])) const blobProof = blobsBundles!.proofs[0] - st.ok(blobProof !== undefined && equalsBytes(blobProof, proofs[0])) - st.end() + assert.ok(blobProof !== undefined && equalsBytes(blobProof, proofs[0])) }) - t.test('should exclude missingBlobTx', async (st) => { + it('should exclude missingBlobTx', async () => { try { initKZG(kzg, __dirname + '/../../src/trustedSetups/devnet6.txt') // eslint-disable-next-line @@ -432,7 +457,7 @@ tape('[PendingBlock]', async (t) => { ).sign(A.privateKey) await txPool.add(missingBlobTx) - st.equal(txPool.txsInPool, 1, '1 txs should still be in the pool') + assert.equal(txPool.txsInPool, 1, '1 txs should still be in the pool') const pendingBlock = new PendingBlock({ config, txPool }) const vm = await VM.create({ common }) @@ -445,19 +470,16 @@ tape('[PendingBlock]', async (t) => { const payloadId = await pendingBlock.start(vm, parentBlock) const [block, _receipts, _value, blobsBundles] = (await pendingBlock.build(payloadId)) ?? 
[] - st.ok(block !== undefined && blobsBundles !== undefined) - st.equal(block!.transactions.length, 0, 'Missing blob tx should not be included') - st.end() + assert.ok(block !== undefined && blobsBundles !== undefined) + assert.equal(block!.transactions.length, 0, 'Missing blob tx should not be included') }) - t.test('should reset td', (st) => { + it('should reset td', () => { td.reset() // according to https://github.com/testdouble/testdouble.js/issues/379#issuecomment-415868424 // mocking indirect dependencies is not properly supported, but it works for us in this file, // so we will replace the original functions to avoid issues in other tests that come after ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - - st.end() }) }) diff --git a/packages/client/test/net/peer/peer.spec.ts b/packages/client/test/net/peer/peer.spec.ts index 9030235986..1963b3e2e8 100644 --- a/packages/client/test/net/peer/peer.spec.ts +++ b/packages/client/test/net/peer/peer.spec.ts @@ -1,12 +1,12 @@ import { EventEmitter } from 'events' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Config } from '../../../src/config' import { Peer } from '../../../src/net/peer' import { Event } from '../../../src/types' -tape('[Peer]', (t) => { +describe('[Peer]', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const peer = new Peer({ config, @@ -16,52 +16,48 @@ tape('[Peer]', (t) => { inbound: true, }) - t.test('should get/set idle state', (t) => { - t.ok(peer.idle, 'is initially idle') + it('should get/set idle state', () => { + assert.ok(peer.idle, 'is initially idle') peer.idle = false - t.notOk(peer.idle, 'idle set to false') - t.end() + assert.notOk(peer.idle, 'idle set to false') }) - t.test('should bind protocol', async (t) => { + it('should bind protocol', async () => { const 
bound = new EventEmitter() as any const sender = 'sender' as any const protocol = td.object('Protocol') as any bound.name = 'bound0' protocol.name = 'proto0' - t.plan(3) td.when(protocol.bind(peer, sender)).thenResolve(bound) await (peer as any).bindProtocol(protocol, sender) - t.equals(peer.bound.get('bound0'), bound, 'protocol bound') + assert.equal(peer.bound.get('bound0'), bound, 'protocol bound') config.events.on(Event.PROTOCOL_MESSAGE, (msg, name, msgPeer) => { - t.ok(msg === 'msg0' && name === 'proto0' && msgPeer === peer, 'on message') + assert.ok(msg === 'msg0' && name === 'proto0' && msgPeer === peer, 'on message') }) config.events.on(Event.PEER_ERROR, (err) => { - if (err.message === 'err0') t.pass('on error') + if (err.message === 'err0') assert.ok(true, 'on error') }) config.events.emit(Event.PROTOCOL_MESSAGE, 'msg0', 'proto0', peer) config.events.emit(Event.PEER_ERROR, new Error('err0'), peer) }) - t.test('should understand protocols', (t) => { - t.ok(peer.understands('bound0'), 'understands bound protocol') - t.notOk(peer.understands('unknown'), 'does not understand unknown protocol') - t.end() + it('should understand protocols', () => { + assert.ok(peer.understands('bound0'), 'understands bound protocol') + assert.notOk(peer.understands('unknown'), 'does not understand unknown protocol') }) - t.test('should convert to string', (t) => { - t.equals( + it('should convert to string', () => { + assert.equal( peer.toString(true), 'id=0123456789abcdef address=address0 transport=transport0 protocols=bound0 inbound=true', 'correct full id string' ) peer.inbound = false - t.equals( + assert.equal( peer.toString(), 'id=01234567 address=address0 transport=transport0 protocols=bound0 inbound=false', 'correct short id string' ) - t.end() }) }) diff --git a/packages/client/test/net/peer/rlpxpeer.spec.ts b/packages/client/test/net/peer/rlpxpeer.spec.ts index 651121f4ad..089fd5de3d 100644 --- a/packages/client/test/net/peer/rlpxpeer.spec.ts +++ 
b/packages/client/test/net/peer/rlpxpeer.spec.ts @@ -1,21 +1,16 @@ +import { RLPx } from '@ethereumjs/devp2p' import { EventEmitter } from 'events' -import * as tape from 'tape' -import * as td from 'testdouble' +import { assert, describe, expect, it, vi } from 'vitest' import { Config } from '../../../src/config' -import { RlpxSender } from '../../../src/net/protocol/rlpxsender' import { Event } from '../../../src/types' -tape('[RlpxPeer]', async (t) => { - const { DPT, ETH, LES, SNAP } = await import('@ethereumjs/devp2p') - class RLPx extends EventEmitter { - connect(_: any) {} - } - RLPx.prototype.connect = td.func() - td.replace('@ethereumjs/devp2p', { DPT, ETH, LES, SNAP, RLPx }) +describe('[RlpxPeer]', async () => { + vi.mock('@ethereumjs/devp2p') + const { RlpxPeer } = await import('../../../src/net/peer/rlpxpeer') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const peer = new RlpxPeer({ config, @@ -23,12 +18,11 @@ tape('[RlpxPeer]', async (t) => { host: '10.0.0.1', port: 1234, }) - t.equals(peer.address, '10.0.0.1:1234', 'address correct') - t.notOk(peer.connected, 'not connected') - t.end() + assert.equal(peer.address, '10.0.0.1:1234', 'address correct') + assert.notOk(peer.connected, 'not connected') }) - t.test('should compute capabilities', (t) => { + it('should compute capabilities', () => { const protocols: any = [ { name: 'eth', versions: [66] }, { name: 'les', versions: [4] }, @@ -39,7 +33,7 @@ tape('[RlpxPeer]', async (t) => { version, length, })) - t.deepEquals( + assert.deepEqual( caps, [ { name: 'eth', version: 66, length: 17 }, @@ -48,10 +42,9 @@ tape('[RlpxPeer]', async (t) => { ], 'correct capabilities' ) - t.end() }) - t.test('should connect to peer', async (t) => { + it('should connect to peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) 
const proto0 = { name: 'les', versions: [4] } as any const peer = new RlpxPeer({ @@ -61,82 +54,76 @@ tape('[RlpxPeer]', async (t) => { host: '10.0.0.1', port: 1234, }) - proto0.open = td.func() - td.when(proto0.open()).thenResolve(null) + proto0.open = vi.fn().mockResolvedValue(null) await peer.connect() - t.ok('connected successfully') - td.verify(RLPx.prototype.connect(td.matchers.anything())) - t.end() + assert.ok('connected successfully') + expect(RLPx.prototype.connect).toBeCalled() }) - t.test('should handle peer events', async (t) => { - t.plan(5) + it('should handle peer events', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const peer = new RlpxPeer({ config, id: 'abcdef0123', host: '10.0.0.1', port: 1234 }) - const rlpxPeer = { id: 'zyx321', getDisconnectPrefix: td.func() } as any - ;(peer as any).bindProtocols = td.func() + const rlpxPeer = { id: 'zyx321', getDisconnectPrefix: vi.fn() } as any + ;(peer as any).bindProtocols = vi.fn().mockResolvedValue(undefined) peer.rlpxPeer = rlpxPeer - td.when((peer as any).bindProtocols(rlpxPeer)).thenResolve(undefined) - td.when(rlpxPeer.getDisconnectPrefix('reason')).thenReturn('reason') + + rlpxPeer.getDisconnectPrefix = vi.fn().mockImplementation((param: any) => { + if (param === 'reason') { + return 'reason' + } + }) await peer.connect() config.events.on(Event.PEER_ERROR, (error) => { - if (error.message === 'err0') t.pass('got err0') + if (error.message === 'err0') assert.ok(true, 'got err0') }) peer.config.events.on(Event.PEER_CONNECTED, (peer) => - t.equals(peer.id, 'abcdef0123', 'got connected') + assert.equal(peer.id, 'abcdef0123', 'got connected') ) peer.config.events.on(Event.PEER_DISCONNECTED, (rlpxPeer) => - t.equals(rlpxPeer.pooled, false, 'got disconnected') + assert.equal(rlpxPeer.pooled, false, 'got disconnected') ) peer.rlpx!.emit('peer:error', rlpxPeer, new Error('err0')) peer.rlpx!.emit('peer:added', rlpxPeer) peer.rlpx!.emit('peer:removed', 
rlpxPeer, 'reason') - ;(peer as any).bindProtocols = td.func() + ;(peer as any).bindProtocols = vi.fn().mockRejectedValue(new Error('err1')) + rlpxPeer.getDisconnectPrefix = vi.fn().mockImplementation((param: string) => { + if (param === 'reason') throw new Error('err2') + }) + peer.rlpxPeer = rlpxPeer await peer.connect() - td.when((peer as any).bindProtocols(rlpxPeer)).thenReject(new Error('err1')) - td.when(rlpxPeer.getDisconnectPrefix('reason')).thenThrow(new Error('err2')) + peer.config.events.on(Event.PEER_ERROR, (err) => { - if (err.message === 'err1') t.pass('got err1') - if (err.message === 'err2') t.pass('got err2') + if (err.message === 'err1') assert.ok(true, 'got err1') + if (err.message === 'err2') assert.ok(true, 'got err2') }) peer.rlpx!.emit('peer:added', rlpxPeer) peer.rlpx!.emit('peer:removed', rlpxPeer, 'reason') }) - t.test('should accept peer connection', async (t) => { + it('should accept peer connection', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const peer: any = new RlpxPeer({ config, id: 'abcdef0123', host: '10.0.0.1', port: 1234 }) - peer.bindProtocols = td.func() - td.when(peer.bindProtocols('rlpxpeer' as any)).thenResolve(null) + peer.bindProtocols = vi.fn().mockResolvedValue(null) + await peer.accept('rlpxpeer' as any, 'server') - t.equals(peer.server, 'server', 'server set') - t.end() + assert.equal(peer.server, 'server', 'server set') }) - t.test('should bind protocols', async (t) => { + it('should bind protocols', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const protocols = [{ name: 'proto0' }] as any const peer = new RlpxPeer({ config, id: 'abcdef0123', protocols, host: '10.0.0.1', port: 1234 }) const proto0 = new (class Proto0 extends EventEmitter {})() - const rlpxPeer = { getProtocols: td.func() } as any - ;(peer as any).bindProtocol = td.func() - const bindProtocolThen = td.func() - td.when((peer as 
any).bindProtocol(td.matchers.anything(), td.matchers.anything())).thenReturn({ - then: bindProtocolThen, - }) - td.when(rlpxPeer.getProtocols()).thenReturn([proto0]) + const rlpxPeer = { + getProtocols: vi.fn().mockReturnValue([proto0]), + } as any + peer['bindProtocol'] = vi.fn().mockResolvedValue(undefined) await (peer as any).bindProtocols(rlpxPeer) - td.verify((peer as any).bindProtocol({ name: 'proto0' } as any, td.matchers.isA(RlpxSender))) - t.ok(peer.connected, 'connected set to true') - t.end() - }) - - t.test('should reset td', (t) => { - td.reset() - t.end() + expect((peer as any).bindProtocol).toBeCalled() + assert.ok(peer.connected, 'connected set to true') }) }) diff --git a/packages/client/test/net/peerpool.spec.ts b/packages/client/test/net/peerpool.spec.ts index c7d488864c..ed2928c0c8 100644 --- a/packages/client/test/net/peerpool.spec.ts +++ b/packages/client/test/net/peerpool.spec.ts @@ -1,6 +1,6 @@ import { EventEmitter } from 'events' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Config } from '../../src/config' import { Event } from '../../src/types' @@ -8,22 +8,20 @@ import { MockPeer } from '../integration/mocks/mockpeer' import type { RlpxServer } from '../../src/net/server' -tape('[PeerPool]', async (t) => { +describe('[PeerPool]', async () => { const Peer = td.replace('../../src/net/peer/peer', function (this: any, id: string) { this.id = id // eslint-disable-line no-invalid-this }) const { PeerPool } = await import('../../src/net/peerpool') - t.test('should initialize', (t) => { + it('should initialize', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool({ config }) - t.notOk((pool as any).pool.size, 'empty pool') - t.notOk((pool as any).opened, 'not open') - t.end() + assert.notOk((pool as any).pool.size, 'empty pool') + assert.notOk((pool as any).opened, 'not open') }) - t.test('should 
open/close', async (t) => { - t.plan(3) + it('should open/close', async () => { const server = {} const config = new Config({ servers: [server as RlpxServer], @@ -40,21 +38,19 @@ tape('[PeerPool]', async (t) => { }) await pool.open() config.events.on(Event.PEER_CONNECTED, (peer) => { - if (pool.contains(peer.id)) t.pass('peer connected') + if (pool.contains(peer.id)) assert.ok(true, 'peer connected') }) config.events.on(Event.POOL_PEER_REMOVED, () => { - if (!pool.contains('peer')) t.pass('peer disconnected') + if (!pool.contains('peer')) assert.ok(true, 'peer disconnected') }) pool.add(peer) pool.remove(peer) - t.equals(await pool.open(), false, 'already opened') + assert.equal(await pool.open(), false, 'already opened') await pool.close() - t.notOk((pool as any).opened, 'closed') - t.end() + assert.notOk((pool as any).opened, 'closed') }) - t.test('should connect/disconnect peer', (t) => { - t.plan(2) + it('should connect/disconnect peer', () => { const peer = new EventEmitter() as any const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool({ config }) @@ -62,24 +58,23 @@ tape('[PeerPool]', async (t) => { ;(peer as any).handleMessageQueue = td.func() ;(pool as any).connected(peer) pool.config.events.on(Event.PROTOCOL_MESSAGE, (msg: any, proto: any, p: any) => { - t.ok(msg === 'msg0' && proto === 'proto0' && p === peer, 'got message') + assert.ok(msg === 'msg0' && proto === 'proto0' && p === peer, 'got message') }) config.events.emit(Event.PROTOCOL_MESSAGE, 'msg0', 'proto0', peer) pool.config.events.emit(Event.PEER_ERROR, new Error('err0'), peer) ;(pool as any).disconnected(peer) - t.notOk((pool as any).pool.get('abc'), 'peer removed') + assert.notOk((pool as any).pool.get('abc'), 'peer removed') }) - t.test('should check contains', (t) => { + it('should check contains', () => { const peer = new Peer('abc') const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = 
new PeerPool({ config }) pool.add(peer) - t.ok(pool.contains(peer.id), 'found peer') - t.end() + assert.ok(pool.contains(peer.id), 'found peer') }) - t.test('should get idle peers', (t) => { + it('should get idle peers', () => { const peers = [new Peer(1), new Peer(2), new Peer(3)] const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool({ config }) @@ -87,16 +82,15 @@ tape('[PeerPool]', async (t) => { for (const p of peers) { pool.add(p) } - t.equals(pool.idle(), peers[1], 'correct idle peer') - t.equals( + assert.equal(pool.idle(), peers[1], 'correct idle peer') + assert.equal( pool.idle((p: any) => p.id > 1), peers[1], 'correct idle peer with filter' ) - t.end() }) - t.test('should ban peer', (t) => { + it('should ban peer', () => { const peers = [{ id: 1 }, { id: 2, server: { ban: td.func() } }] const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool({ config }) @@ -104,16 +98,16 @@ tape('[PeerPool]', async (t) => { pool.add(p) pool.ban(p, 1000) } - pool.config.events.on(Event.POOL_PEER_BANNED, (peer) => t.equals(peer, peers[1], 'banned peer')) + pool.config.events.on(Event.POOL_PEER_BANNED, (peer) => + assert.equal(peer, peers[1], 'banned peer') + ) pool.config.events.on(Event.POOL_PEER_REMOVED, (peer) => - t.equals(peer, peers[1], 'removed peer') + assert.equal(peer, peers[1], 'removed peer') ) - t.equals(pool.peers[0], peers[0], 'outbound peer not banned') - t.end() + assert.equal(pool.peers[0], peers[0], 'outbound peer not banned') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/net/protocol/boundprotocol.spec.ts b/packages/client/test/net/protocol/boundprotocol.spec.ts index a52fb9d0ed..ff4865313c 100644 --- a/packages/client/test/net/protocol/boundprotocol.spec.ts +++ b/packages/client/test/net/protocol/boundprotocol.spec.ts @@ -1,14 +1,14 @@ /// import { 
EventEmitter } from 'events' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Config } from '../../../src/config' import { BoundProtocol } from '../../../src/net/protocol' import { Sender } from '../../../src/net/protocol/sender' import { Event } from '../../../src/types' -tape('[BoundProtocol]', (t) => { +describe('[BoundProtocol]', () => { const peer = td.object('Peer') as any const protocol = td.object('Protocol') as any const testMessage = { @@ -27,7 +27,7 @@ tape('[BoundProtocol]', (t) => { protocol.timeout = 100 protocol.messages = [testMessage, testResponse] - t.test('should add methods for messages with a response', (t) => { + it('should add methods for messages with a response', () => { const sender = new Sender() const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const bound = new BoundProtocol({ @@ -36,11 +36,10 @@ tape('[BoundProtocol]', (t) => { peer, sender, }) - t.ok(/this.request/.test((bound as any).testMessage.toString()), 'added testMessage') - t.end() + assert.ok(/this.request/.test((bound as any).testMessage.toString()), 'added testMessage') }) - t.test('should get/set status', (t) => { + it('should get/set status', () => { const sender = new Sender() const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const bound = new BoundProtocol({ @@ -49,13 +48,12 @@ tape('[BoundProtocol]', (t) => { peer, sender, }) - t.deepEquals(bound.status, {}, 'empty status') + assert.deepEqual(bound.status, {}, 'empty status') bound.status = { id: 1 } - t.deepEquals(bound.status, { id: 1 }, 'status set') - t.end() + assert.deepEqual(bound.status, { id: 1 }, 'status set') }) - t.test('should do handshake', async (t) => { + it('should do handshake', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const sender = new EventEmitter() as Sender const bound = new BoundProtocol({ @@ 
-66,11 +64,10 @@ tape('[BoundProtocol]', (t) => { }) td.when(protocol.handshake(td.matchers.isA(EventEmitter))).thenResolve({ id: 1 }) await bound.handshake(sender) - t.deepEquals(bound.status, { id: 1 }, 'handshake success') - t.end() + assert.deepEqual(bound.status, { id: 1 }, 'handshake success') }) - t.test('should handle incoming without resolver', async (t) => { + it('should handle incoming without resolver', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const sender = new Sender() const bound = new BoundProtocol({ @@ -80,19 +77,18 @@ tape('[BoundProtocol]', (t) => { sender, }) bound.config.events.once(Event.PROTOCOL_ERROR, (err) => { - t.ok(/error0/.test(err.message), 'decode error') + assert.ok(/error0/.test(err.message), 'decode error') }) td.when(protocol.decode(testMessage, '1')).thenThrow(new Error('error0')) ;(bound as any).handle({ name: 'TestMessage', code: 0x01, payload: '1' }) bound.config.events.once(Event.PROTOCOL_MESSAGE, (message) => { - t.deepEquals(message, { name: 'TestMessage', data: 2 }, 'correct message') + assert.deepEqual(message, { name: 'TestMessage', data: 2 }, 'correct message') }) td.when(protocol.decode(testMessage, '2')).thenReturn(2) ;(bound as any).handle({ name: 'TestMessage', code: 0x01, payload: '2' }) - t.end() }) - t.test('should perform send', (t) => { + it('should perform send', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const sender = new Sender() sender.sendMessage = td.func() @@ -103,13 +99,12 @@ tape('[BoundProtocol]', (t) => { sender, }) td.when(protocol.encode(testMessage, 3)).thenReturn('3') - t.deepEquals(bound.send('TestMessage', 3), testMessage, 'message returned') + assert.deepEqual(bound.send('TestMessage', 3), testMessage, 'message returned') td.verify(sender.sendMessage(0x01, '3' as any)) - t.throws(() => bound.send('UnknownMessage'), /Unknown message/, 'unknown message') - t.end() + assert.throws(() 
=> bound.send('UnknownMessage'), /Unknown message/, 'unknown message') }) - t.test('should perform request', async (t) => { + it('should perform request', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const sender = new Sender() const bound = new BoundProtocol({ @@ -127,17 +122,16 @@ tape('[BoundProtocol]', (t) => { }, 100) }) const response = await (bound as any).testMessage(1) - t.equals(response, 2, 'got response') + assert.equal(response, 2, 'got response') td.when(protocol.decode(testResponse, '2')).thenThrow(new Error('error1')) try { await (bound as any).testMessage(1) } catch (err: any) { - t.ok(/error1/.test(err.message), 'got error') + assert.ok(/error1/.test(err.message), 'got error') } - t.end() }) - t.test('should timeout request', async (t) => { + it('should timeout request', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const sender = td.object('Sender') const bound = new BoundProtocol({ @@ -149,13 +143,11 @@ tape('[BoundProtocol]', (t) => { try { await (bound as any).testMessage(1) } catch (err: any) { - t.ok(/timed out/.test(err.message), 'got error') + assert.ok(/timed out/.test(err.message), 'got error') } - t.end() }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/net/protocol/ethprotocol.spec.ts b/packages/client/test/net/protocol/ethprotocol.spec.ts index e1bb5aa493..4d2b748e55 100644 --- a/packages/client/test/net/protocol/ethprotocol.spec.ts +++ b/packages/client/test/net/protocol/ethprotocol.spec.ts @@ -2,34 +2,32 @@ import { Block } from '@ethereumjs/block' import { Common, Hardfork } from '@ethereumjs/common' import { FeeMarketEIP1559Transaction, TransactionFactory, TransactionType } from '@ethereumjs/tx' import { bigIntToBytes, bytesToBigInt, hexToBytes, randomBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, 
it } from 'vitest' import { Chain } from '../../../src/blockchain/chain' import { Config } from '../../../src/config' import { EthProtocol } from '../../../src/net/protocol' -tape('[EthProtocol]', (t) => { - t.test('should get properties', async (t) => { +describe('[EthProtocol]', () => { + it('should get properties', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) - t.ok(typeof p.name === 'string', 'get name') - t.ok(Array.isArray(p.versions), 'get versions') - t.ok(Array.isArray(p.messages), 'get messages') - t.end() + assert.ok(typeof p.name === 'string', 'get name') + assert.ok(Array.isArray(p.versions), 'get versions') + assert.ok(Array.isArray(p.messages), 'get messages') }) - t.test('should open correctly', async (t) => { + it('should open correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) await p.open() - t.ok(p.opened, 'opened is true') - t.notOk(await p.open(), 'repeat open') - t.end() + assert.ok(p.opened, 'opened is true') + assert.notOk(await p.open(), 'repeat open') }) - t.test('should encode/decode status', async (t) => { + it('should encode/decode status', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) @@ -51,7 +49,7 @@ tape('[EthProtocol]', (t) => { return { hash: () => '0xbb' } }, }) - t.deepEquals( + assert.deepEqual( p.encodeStatus(), { networkId: hexToBytes('0x01'), @@ -68,17 +66,16 @@ tape('[EthProtocol]', (t) => { bestHash: '0xaa', genesisHash: '0xbb', }) - t.ok( + assert.ok( status.networkId === BigInt(1) && status.td === BigInt(100) && status.bestHash === '0xaa' && status.genesisHash === '0xbb', 'decode 
status' ) - t.end() }) - t.test('verify that NewBlock handler encodes/decodes correctly', async (t) => { + it('verify that NewBlock handler encodes/decodes correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) @@ -92,12 +89,11 @@ tape('[EthProtocol]', (t) => { block, td, ]) - t.deepEquals(res[0].hash(), block.hash(), 'correctly decoded block') - t.equal(bytesToBigInt(res2[1]), td, 'correctly encoded td') - t.end() + assert.deepEqual(res[0].hash(), block.hash(), 'correctly decoded block') + assert.equal(bytesToBigInt(res2[1]), td, 'correctly encoded td') }) - t.test('verify that GetReceipts handler encodes/decodes correctly', async (t) => { + it('verify that GetReceipts handler encodes/decodes correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) @@ -110,14 +106,13 @@ tape('[EthProtocol]', (t) => { reqId: BigInt(1), hashes: [block.hash()], }) - t.equal(res.reqId, BigInt(1), 'correctly decoded reqId') - t.deepEquals(res.hashes[0], block.hash(), 'correctly decoded blockHash') - t.equal(bytesToBigInt(res2[0]), BigInt(1), 'correctly encoded reqId') - t.deepEquals(res2[1][0], block.hash(), 'correctly encoded blockHash') - t.end() + assert.equal(res.reqId, BigInt(1), 'correctly decoded reqId') + assert.deepEqual(res.hashes[0], block.hash(), 'correctly decoded blockHash') + assert.equal(bytesToBigInt(res2[0]), BigInt(1), 'correctly encoded reqId') + assert.deepEqual(res2[1][0], block.hash(), 'correctly encoded blockHash') }) - t.test('verify that PooledTransactions handler encodes correctly', async (t) => { + it('verify that PooledTransactions handler encodes correctly', async () => { const config = new Config({ transports: [], common: new Common({ chain: Config.CHAIN_DEFAULT, 
hardfork: Hardfork.London }), @@ -139,12 +134,11 @@ tape('[EthProtocol]', (t) => { reqId: BigInt(1), txs: [tx], }) - t.equal(bytesToBigInt(res[0]), BigInt(1), 'correctly encoded reqId') - t.deepEqual(res[1][0], tx.serialize(), 'EIP1559 transaction correctly encoded') - t.end() + assert.equal(bytesToBigInt(res[0]), BigInt(1), 'correctly encoded reqId') + assert.deepEqual(res[1][0], tx.serialize(), 'EIP1559 transaction correctly encoded') }) - t.test('verify that Receipts encode/decode correctly', async (t) => { + it('verify that Receipts encode/decode correctly', async () => { const config = new Config({ transports: [], common: new Common({ chain: Config.CHAIN_DEFAULT, hardfork: Hardfork.London }), @@ -187,7 +181,7 @@ tape('[EthProtocol]', (t) => { reqId: BigInt(1), receipts, }) - t.equal(bytesToBigInt(res[0]), BigInt(1), 'correctly encoded reqId') + assert.equal(bytesToBigInt(res[0]), BigInt(1), 'correctly encoded reqId') const expectedSerializedReceipts = [ hexToBytes( '0x02f9016d0164b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f866f864940000000000000000000000000000000000000000f842a00000000000000000000000000000000000000000000000000000000000000000a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000' @@ -196,20 +190,19 @@ tape('[EthProtocol]', (t) => { 
'0xf9016f808203e8b9010001010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101f866f864940101010101010101010101010101010101010101f842a00101010101010101010101010101010101010101010101010101010101010101a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000' ), ] - t.deepEqual(res[1], expectedSerializedReceipts, 'correctly encoded receipts') + assert.deepEqual(res[1], expectedSerializedReceipts, 'correctly encoded receipts') // decode the encoded result and match to the original receipts (without tx type) res = p.decode(p.messages.filter((message) => message.name === 'Receipts')[0], res) - t.equal(BigInt(res[0]), BigInt(1), 'correctly decoded reqId') + assert.equal(BigInt(res[0]), BigInt(1), 'correctly decoded reqId') const receiptsWithoutTxType = receipts.map((r: any) => { delete r.txType return r }) - t.deepEqual(res[1], receiptsWithoutTxType, 'receipts correctly decoded') - t.end() + assert.deepEqual(res[1], receiptsWithoutTxType, 'receipts correctly decoded') }) - t.test('verify that Transactions handler encodes/decodes correctly', async (st) => { + it('verify that Transactions handler encodes/decodes correctly', async () => { const config = new Config({ transports: [], common: new Common({ @@ -234,22 +227,21 @@ tape('[EthProtocol]', (t) => { eip1559Tx, blobTx, ]) - st.deepEqual(res[0], legacyTx.serialize(), 'legacy tx correctly encoded') - st.deepEqual(res[1], eip2929Tx.serialize(), 'EIP29292 tx correctly encoded') - st.deepEqual(res[2], eip1559Tx.serialize(), 'EIP1559 tx correctly 
encoded') + assert.deepEqual(res[0], legacyTx.serialize(), 'legacy tx correctly encoded') + assert.deepEqual(res[1], eip2929Tx.serialize(), 'EIP29292 tx correctly encoded') + assert.deepEqual(res[2], eip1559Tx.serialize(), 'EIP1559 tx correctly encoded') const decoded = p.decode( p.messages.filter((message) => message.name === 'Transactions')[0], res ) - st.deepEqual(decoded[0].type, legacyTx.type, 'decoded legacy tx correctly') - st.deepEqual(decoded[1].type, eip2929Tx.type, 'decoded eip2929 tx correctly') - st.deepEqual(decoded[2].type, eip1559Tx.type, 'decoded EIP1559 tx correctly') - st.equal(decoded.length, 3, 'should not include blob transaction') - st.end() + assert.deepEqual(decoded[0].type, legacyTx.type, 'decoded legacy tx correctly') + assert.deepEqual(decoded[1].type, eip2929Tx.type, 'decoded eip2929 tx correctly') + assert.deepEqual(decoded[2].type, eip1559Tx.type, 'decoded EIP1559 tx correctly') + assert.equal(decoded.length, 3, 'should not include blob transaction') }) - t.test('verify that NewPooledTransactionHashes encodes/decodes correctly', async (st) => { + it('verify that NewPooledTransactionHashes encodes/decodes correctly', async () => { const config = new Config({ transports: [], common: new Common({ chain: Config.CHAIN_DEFAULT, hardfork: Hardfork.London }), @@ -269,8 +261,8 @@ tape('[EthProtocol]', (t) => { p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], [[fakeTx.type], [fakeTx.serialize().byteLength], [fakeHash]] ) - st.deepEqual(encoded[0], fakeHash, 'encoded hash correctly with pre-eth/68 format') - st.deepEqual(encodedEth68[2][0], fakeHash, 'encoded hash correctly with eth/68 format') + assert.deepEqual(encoded[0], fakeHash, 'encoded hash correctly with pre-eth/68 format') + assert.deepEqual(encodedEth68[2][0], fakeHash, 'encoded hash correctly with eth/68 format') const decoded = p.decode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], @@ -280,8 +272,7 @@ 
tape('[EthProtocol]', (t) => { p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], encodedEth68 ) - st.deepEqual(decoded[0], fakeHash, 'decoded hash correctly with pre-eth/68 format') - st.deepEqual(decodedEth68[2][0], fakeHash, 'decoded hash correctly with eth/68 format') - st.end() + assert.deepEqual(decoded[0], fakeHash, 'decoded hash correctly with pre-eth/68 format') + assert.deepEqual(decodedEth68[2][0], fakeHash, 'decoded hash correctly with eth/68 format') }) }) diff --git a/packages/client/test/net/protocol/flowcontrol.spec.ts b/packages/client/test/net/protocol/flowcontrol.spec.ts index 53b8b7fc32..8b18dcebeb 100644 --- a/packages/client/test/net/protocol/flowcontrol.spec.ts +++ b/packages/client/test/net/protocol/flowcontrol.spec.ts @@ -1,14 +1,12 @@ /// /// -import * as tape from 'tape' -import * as td from 'testdouble' -import timers from 'testdouble-timers' +import { assert, describe, it, vi } from 'vitest' import { FlowControl } from '../../../src/net/protocol' -timers.use(td) +vi.useFakeTimers() -tape('[FlowControl]', (t) => { +describe('[FlowControl]', () => { const settings = { bl: 1000, mrc: { @@ -17,23 +15,21 @@ tape('[FlowControl]', (t) => { mrr: 10, } const peer = { id: '1', les: { status: settings } } as any - const clock = td.timers() - t.test('should handle incoming flow control', (t) => { - const expected = [700, 700, 410, 120, -170] + it('should handle incoming flow control', () => { + const expected = [700, 410, 120, -170] const flow = new FlowControl(settings) let correct = 0 - for (let count = 0; count < 5; count++) { + for (let count = 0; count < 4; count++) { const bv = flow.handleRequest(peer, 'test', 2) if (bv === expected[count]) correct++ - clock.tick(1) + vi.advanceTimersByTime(1) } - t.equals(correct, 5, 'correct bv values') - t.notOk(flow.out.get(peer.id), 'peer should be dropped') - t.end() + assert.equal(correct, 4, 'correct bv values') + assert.notOk(flow.out.get(peer.id), 'peer should be 
dropped') }) - t.test('should handle outgoing flow control', (t) => { + it('should handle outgoing flow control', () => { const expected = [9, 6, 3, 0, 0] const flow = new FlowControl() let correct = 0 @@ -41,9 +37,8 @@ tape('[FlowControl]', (t) => { flow.handleReply(peer, 1000 - count * 300) const max = flow.maxRequestCount(peer, 'test') if (max === expected[count]) correct++ - clock.tick(1) + vi.advanceTimersByTime(1) } - t.equals(correct, 5, 'correct max values') - t.end() + assert.equal(correct, 5, 'correct max values') }) }) diff --git a/packages/client/test/net/protocol/lesprotocol.spec.ts b/packages/client/test/net/protocol/lesprotocol.spec.ts index 4edb50cff2..f0a148c1d7 100644 --- a/packages/client/test/net/protocol/lesprotocol.spec.ts +++ b/packages/client/test/net/protocol/lesprotocol.spec.ts @@ -1,32 +1,30 @@ import { bytesToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain' import { Config } from '../../../src/config' import { FlowControl, LesProtocol } from '../../../src/net/protocol' -tape('[LesProtocol]', (t) => { - t.test('should get properties', async (t) => { +describe('[LesProtocol]', () => { + it('should get properties', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new LesProtocol({ config, chain }) - t.ok(typeof p.name === 'string', 'get name') - t.ok(Array.isArray(p.versions), 'get versions') - t.ok(Array.isArray(p.messages), 'get messages') - t.end() + assert.ok(typeof p.name === 'string', 'get name') + assert.ok(Array.isArray(p.versions), 'get versions') + assert.ok(Array.isArray(p.messages), 'get messages') }) - t.test('should open correctly', async (t) => { + it('should open correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config 
}) const p = new LesProtocol({ config, chain }) await p.open() - t.ok(p.opened, 'opened is true') - t.notOk(await p.open(), 'repeat open') - t.end() + assert.ok(p.opened, 'opened is true') + assert.notOk(await p.open(), 'repeat open') }) - t.test('should encode/decode status', async (t) => { + it('should encode/decode status', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const flow = new FlowControl({ @@ -66,7 +64,7 @@ tape('[LesProtocol]', (t) => { }, }) let status = p.encodeStatus() - t.ok( + assert.ok( bytesToHex(status.networkId) === '0x01' && bytesToHex(status.headTd) === '0x64' && status.headHash === '0xaa' && @@ -88,7 +86,7 @@ tape('[LesProtocol]', (t) => { ) status = { ...status, networkId: [0x01] } status = p.decodeStatus(status) - t.ok( + assert.ok( status.networkId === BigInt(1) && status.headTd === BigInt(100) && status.headHash === '0xaa' && @@ -109,6 +107,5 @@ tape('[LesProtocol]', (t) => { status.mrc.GetBlockHeaders.req === 10, 'decode status' ) - t.end() }) }) diff --git a/packages/client/test/net/protocol/protocol.spec.ts b/packages/client/test/net/protocol/protocol.spec.ts index 6bdfe6d44c..6ac77a3544 100644 --- a/packages/client/test/net/protocol/protocol.spec.ts +++ b/packages/client/test/net/protocol/protocol.spec.ts @@ -1,10 +1,10 @@ -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Config } from '../../../src/config' import { BoundProtocol, Protocol, Sender } from '../../../src/net/protocol' -tape('[Protocol]', (t) => { +describe('[Protocol]', () => { const testMessage = { name: 'TestMessage', code: 0x01, @@ -34,34 +34,31 @@ tape('[Protocol]', (t) => { } } - t.test('should throw if missing abstract methods', (t) => { + it('should throw if missing abstract methods', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const p = new 
Protocol({ config }) - t.throws(() => p.versions, /Unimplemented/) - t.throws(() => p.messages, /Unimplemented/) - t.throws(() => p.encodeStatus(), /Unimplemented/) - t.throws(() => p.decodeStatus({}), /Unimplemented/) - t.end() + assert.throws(() => p.versions, /Unimplemented/) + assert.throws(() => p.messages, /Unimplemented/) + assert.throws(() => p.encodeStatus(), /Unimplemented/) + assert.throws(() => p.decodeStatus({}), /Unimplemented/) }) - t.test('should handle open', async (t) => { + it('should handle open', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const p = new Protocol({ config }) await p.open() - t.ok(p.opened, 'is open') - t.end() + assert.ok(p.opened, 'is open') }) - t.test('should perform handshake (status now)', async (t) => { + it('should perform handshake (status now)', async () => { const p = new TestProtocol() const sender = new Sender() sender.sendStatus = td.func() sender.status = [1] - t.deepEquals(await p.handshake(sender), { id: 1 }, 'got status now') - t.end() + assert.deepEqual(await p.handshake(sender), { id: 1 }, 'got status now') }) - t.test('should perform handshake (status later)', async (t) => { + it('should perform handshake (status later)', async () => { const p = new TestProtocol() const sender = new Sender() sender.sendStatus = td.func() @@ -70,11 +67,10 @@ tape('[Protocol]', (t) => { }, 100) const status = await p.handshake(sender) td.verify(sender.sendStatus([1])) - t.deepEquals(status, { id: 1 }, 'got status later') - t.end() + assert.deepEqual(status, { id: 1 }, 'got status later') }) - t.test('should handle handshake timeout', async (t) => { + it('should handle handshake timeout', async () => { const p = new TestProtocol() const sender = new Sender() sender.sendStatus = td.func() @@ -85,35 +81,31 @@ tape('[Protocol]', (t) => { try { await p.handshake(sender) } catch (e: any) { - t.ok(/timed out/.test(e.message), 'got timeout error') + assert.ok(/timed 
out/.test(e.message), 'got timeout error') } - t.end() }) - t.test('should encode message', (t) => { + it('should encode message', () => { const p = new TestProtocol() - t.equals(p.encode(testMessage, 1234), '1234', 'encoded') - t.equals(p.encode({} as any, 1234), 1234, 'encode not defined') - t.end() + assert.equal(p.encode(testMessage, 1234), '1234', 'encoded') + assert.equal(p.encode({} as any, 1234), 1234, 'encode not defined') }) - t.test('should decode message', (t) => { + it('should decode message', () => { const p = new TestProtocol() - t.equals(p.decode(testMessage, '1234'), 1234, 'decoded') - t.equals(p.decode({} as any, 1234), 1234, 'decode not defined') - t.end() + assert.equal(p.decode(testMessage, '1234'), 1234, 'decoded') + assert.equal(p.decode({} as any, 1234), 1234, 'decode not defined') }) - t.test('should bind to peer', async (t) => { + it('should bind to peer', async () => { const p = new TestProtocol() const peer = td.object('Peer') as any const sender = new Sender() BoundProtocol.prototype.handshake = td.func() td.when(BoundProtocol.prototype.handshake(td.matchers.isA(Sender))).thenResolve() const bound = await p.bind(peer, sender) - t.ok(bound instanceof BoundProtocol, 'correct bound protocol') - t.equals(peer.test, bound, 'bound to peer') - t.end() + assert.ok(bound instanceof BoundProtocol, 'correct bound protocol') + assert.equal(peer.test, bound, 'bound to peer') }) td.reset() diff --git a/packages/client/test/net/protocol/rlpxsender.spec.ts b/packages/client/test/net/protocol/rlpxsender.spec.ts index 2bc67da486..170e1d30f0 100644 --- a/packages/client/test/net/protocol/rlpxsender.spec.ts +++ b/packages/client/test/net/protocol/rlpxsender.spec.ts @@ -1,60 +1,55 @@ import { EventEmitter } from 'events' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { RlpxSender } from '../../../src/net/protocol' import type { ETH as Devp2pETH } from '@ethereumjs/devp2p' 
-tape('[RlpxSender]', (t) => { - t.test('should send status', (t) => { +describe('[RlpxSender]', () => { + it('should send status', () => { const rlpxProtocol = td.object() as any const status = { id: 5 } const sender = new RlpxSender(rlpxProtocol) sender.sendStatus(status) td.verify(rlpxProtocol.sendStatus(status)) td.reset() - t.pass('status sent') - t.end() + assert.ok(true, 'status sent') }) - t.test('should send message', (t) => { + it('should send message', () => { const rlpxProtocol = td.object() as any const sender = new RlpxSender(rlpxProtocol) sender.sendMessage(1, 5) td.verify(rlpxProtocol.sendMessage(1, 5)) td.reset() - t.pass('message sent') - t.end() + assert.ok(true, 'message sent') }) - t.test('should receive status', (t) => { + it('should receive status', () => { const rlpxProtocol = new EventEmitter() const sender = new RlpxSender(rlpxProtocol as Devp2pETH) sender.on('status', (status: any) => { - t.equal(status.id, 5, 'status received') - t.equal(sender.status.id, 5, 'status getter') - t.end() + assert.equal(status.id, 5, 'status received') + assert.equal(sender.status.id, 5, 'status getter') }) rlpxProtocol.emit('status', { id: 5 }) }) - t.test('should receive message', (t) => { + it('should receive message', () => { const rlpxProtocol = new EventEmitter() const sender = new RlpxSender(rlpxProtocol as Devp2pETH) sender.on('message', (message: any) => { - t.equal(message.code, 1, 'message received (code)') - t.equal(message.payload, 5, 'message received (payload)') - t.end() + assert.equal(message.code, 1, 'message received (code)') + assert.equal(message.payload, 5, 'message received (payload)') }) rlpxProtocol.emit('message', 1, 5) }) - t.test('should catch errors', (t) => { + it('should catch errors', () => { const rlpxProtocol = new EventEmitter() const sender = new RlpxSender(rlpxProtocol as Devp2pETH) - t.throws(() => sender.sendStatus({ id: 5 }), /not a function/, 'sendStatus error') - t.throws(() => sender.sendMessage(1, 5), /not a 
function/, 'sendMessage error') - t.end() + assert.throws(() => sender.sendStatus({ id: 5 }), /not a function/, 'sendStatus error') + assert.throws(() => sender.sendMessage(1, 5), /not a function/, 'sendMessage error') }) }) diff --git a/packages/client/test/net/protocol/sender.spec.ts b/packages/client/test/net/protocol/sender.spec.ts index f6a8e1dcde..71c56da2cb 100644 --- a/packages/client/test/net/protocol/sender.spec.ts +++ b/packages/client/test/net/protocol/sender.spec.ts @@ -1,20 +1,18 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' -const { Sender } = require('../../../src/net/protocol') +import { Sender } from '../../../src/net/protocol' -tape('[Sender]', (t) => { - t.test('should get/set status', (t) => { +describe('[Sender]', () => { + it('should get/set status', () => { const sender = new Sender() - t.deepEquals(sender.status, null, 'empty status') + assert.deepEqual(sender.status, null, 'empty status') sender.status = { id: 1 } - t.deepEquals(sender.status, { id: 1 }, 'status correct') - t.end() + assert.deepEqual(sender.status, { id: 1 }, 'status correct') }) - t.test('should error on abstract method calls', (t) => { + it('should error on abstract method calls', () => { const sender = new Sender() - t.throws(() => sender.sendStatus(), /Unimplemented/) - t.throws(() => sender.sendMessage(), /Unimplemented/) - t.end() + assert.throws(() => sender.sendStatus(undefined), /Unimplemented/) + assert.throws(() => sender.sendMessage(0, []), /Unimplemented/) }) }) diff --git a/packages/client/test/net/protocol/snapprotocol.spec.ts b/packages/client/test/net/protocol/snapprotocol.spec.ts index 3deed76011..b14895c661 100644 --- a/packages/client/test/net/protocol/snapprotocol.spec.ts +++ b/packages/client/test/net/protocol/snapprotocol.spec.ts @@ -11,7 +11,7 @@ import { setLengthLeft, } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak' -import * as tape from 'tape' +import { assert, describe, it } 
from 'vitest' import { Chain } from '../../../src/blockchain' import { Config } from '../../../src/config' @@ -21,28 +21,26 @@ import { SnapProtocol } from '../../../src/net/protocol' return this.toString() } -tape('[SnapProtocol]', (t) => { - t.test('should get properties', async (t) => { +describe('[SnapProtocol]', () => { + it('should get properties', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) - t.ok(typeof p.name === 'string', 'get name') - t.ok(Array.isArray(p.versions), 'get versions') - t.ok(Array.isArray(p.messages), 'get messages') - t.end() + assert.ok(typeof p.name === 'string', 'get name') + assert.ok(Array.isArray(p.versions), 'get versions') + assert.ok(Array.isArray(p.messages), 'get messages') }) - t.test('should open correctly', async (t) => { + it('should open correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) await p.open() - t.ok(p.opened, 'opened is true') - t.notOk(await p.open(), 'repeat open') - t.end() + assert.ok(p.opened, 'opened is true') + assert.notOk(await p.open(), 'repeat open') }) - t.test('GetAccountRange should encode/decode correctly', async (t) => { + it('GetAccountRange should encode/decode correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -63,40 +61,39 @@ tape('[SnapProtocol]', (t) => { } ) - t.ok( + assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), 'correctly encoded reqId' ) - t.ok( + assert.ok( JSON.stringify(payload[1]) === JSON.stringify(setLengthLeft(root, 32)), 'correctly encoded root' ) - t.ok(JSON.stringify(payload[2]) === 
JSON.stringify(origin), 'correctly encoded origin') - t.ok(JSON.stringify(payload[3]) === JSON.stringify(limit), 'correctly encoded limit') - t.ok( + assert.ok(JSON.stringify(payload[2]) === JSON.stringify(origin), 'correctly encoded origin') + assert.ok(JSON.stringify(payload[3]) === JSON.stringify(limit), 'correctly encoded limit') + assert.ok( JSON.stringify(payload[4]) === JSON.stringify(bigIntToBytes(bytes)), 'correctly encoded bytes' ) - t.ok(payload) + assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetAccountRange')[0], payload ) - t.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') - t.ok( + assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') + assert.ok( JSON.stringify(res.root) === JSON.stringify(setLengthLeft(root, 32)), 'correctly decoded root' ) - t.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly decoded origin') - t.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') - t.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') - t.ok(res) - t.end() + assert.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly decoded origin') + assert.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') + assert.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') + assert.ok(res) }) - t.test('AccountRange should encode/decode correctly', async (t) => { + it('AccountRange should encode/decode correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -106,21 +103,21 @@ tape('[SnapProtocol]', (t) => { p.messages.filter((message) => message.name === 'AccountRange')[0], data ) - t.ok(reqId === BigInt(1), 'reqId should be 1') - t.ok(accounts.length === 2, 
'accounts should be 2') - t.ok(proof.length === 7, 'proof nodes should be 7') + assert.ok(reqId === BigInt(1), 'reqId should be 1') + assert.ok(accounts.length === 2, 'accounts should be 2') + assert.ok(proof.length === 7, 'proof nodes should be 7') const firstAccount = accounts[0].body const secondAccount = accounts[1].body - t.ok(firstAccount[2].length === 0, 'Slim format storageRoot for first account') - t.ok(firstAccount[3].length === 0, 'Slim format codehash for first account') - t.ok( + assert.ok(firstAccount[2].length === 0, 'Slim format storageRoot for first account') + assert.ok(firstAccount[3].length === 0, 'Slim format codehash for first account') + assert.ok( bytesToHex(secondAccount[2]) === '0x3dc6d3cfdc6210b8591ea852961d880821298c7891dea399e02d87550af9d40e', 'storageHash of the second account' ) - t.ok( + assert.ok( bytesToHex(secondAccount[3]) === '0xe68fe0bb7c4a483affd0f19cc2b989105242bd6b256c6de3afd738f8acd80c66', 'codeHash of the second account' @@ -132,14 +129,13 @@ tape('[SnapProtocol]', (t) => { proof, }) ) - t.ok( + assert.ok( contractAccountRangeRLP === bytesToHex(payload), 'Re-encoded payload should match with original' ) - t.end() }) - t.test('AccountRange encode/decode should handle account slim body correctly', async (t) => { + it('AccountRange encode/decode should handle account slim body correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const pSlim = new SnapProtocol({ config, chain }) @@ -152,10 +148,10 @@ tape('[SnapProtocol]', (t) => { resData ) const { accounts: accountsFull } = fullData - t.ok(accountsFull.length === 3, '3 accounts should be decoded in accountsFull') + assert.ok(accountsFull.length === 3, '3 accounts should be decoded in accountsFull') const accountFull = accountsFull[0].body - t.ok(equalsBytes(accountFull[2], KECCAK256_RLP), 'storageRoot should be KECCAK256_RLP') - t.ok(equalsBytes(accountFull[3], 
KECCAK256_NULL), 'codeHash should be KECCAK256_NULL') + assert.ok(equalsBytes(accountFull[2], KECCAK256_RLP), 'storageRoot should be KECCAK256_RLP') + assert.ok(equalsBytes(accountFull[3], KECCAK256_NULL), 'codeHash should be KECCAK256_NULL') // Lets encode fullData as it should be encoded in slim format and upon decoding // we shpuld get slim format @@ -169,15 +165,13 @@ tape('[SnapProtocol]', (t) => { ) // 3 accounts are there in accountRangeRLP - t.ok(accountsSlim.length === 3, '3 accounts should be decoded in accountsSlim') + assert.ok(accountsSlim.length === 3, '3 accounts should be decoded in accountsSlim') const accountSlim = accountsSlim[0].body - t.ok(accountSlim[2].length === 0, 'storageRoot should be decoded in slim') - t.ok(accountSlim[3].length === 0, 'codeHash should be decoded in slim') - - t.end() + assert.ok(accountSlim[2].length === 0, 'storageRoot should be decoded in slim') + assert.ok(accountSlim[3].length === 0, 'codeHash should be decoded in slim') }) - t.test('AccountRange should verify a real sample', async (t) => { + it('AccountRange should verify a real sample', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -208,16 +202,15 @@ tape('[SnapProtocol]', (t) => { proof ) } catch (e) { - t.fail(`AccountRange proof verification failed with message=${(e as Error).message}`) + assert.fail(`AccountRange proof verification failed with message=${(e as Error).message}`) } - t.ok( + assert.ok( equalsBytes(keccak256(proof[0]), stateRoot), 'Proof should link to the requested stateRoot' ) - t.end() }) - t.test('GetStorageRanges should encode/decode correctly', async (t) => { + it('GetStorageRanges should encode/decode correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, 
chain }) @@ -243,41 +236,43 @@ tape('[SnapProtocol]', (t) => { } ) - t.ok( + assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), 'correctly encoded reqId' ) - t.ok( + assert.ok( JSON.stringify(payload[1]) === JSON.stringify(setLengthLeft(root, 32)), 'correctly encoded root' ) - t.ok(JSON.stringify(payload[2]) === JSON.stringify(accounts), 'correctly encoded accounts') - t.ok(JSON.stringify(payload[3]) === JSON.stringify(origin), 'correctly encoded origin') - t.ok(JSON.stringify(payload[4]) === JSON.stringify(limit), 'correctly encoded limit') - t.ok( + assert.ok(JSON.stringify(payload[2]) === JSON.stringify(accounts), 'correctly encoded accounts') + assert.ok(JSON.stringify(payload[3]) === JSON.stringify(origin), 'correctly encoded origin') + assert.ok(JSON.stringify(payload[4]) === JSON.stringify(limit), 'correctly encoded limit') + assert.ok( JSON.stringify(payload[5]) === JSON.stringify(bigIntToBytes(bytes)), 'correctly encoded bytes' ) - t.ok(payload) + assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetStorageRanges')[0], payload ) - t.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') - t.ok( + assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') + assert.ok( JSON.stringify(res.root) === JSON.stringify(setLengthLeft(root, 32)), 'correctly decoded root' ) - t.ok(JSON.stringify(res.accounts) === JSON.stringify(accounts), 'correctly decoded accounts') - t.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly decoded origin') - t.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') - t.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') - t.ok(payload) - t.end() + assert.ok( + JSON.stringify(res.accounts) === JSON.stringify(accounts), + 'correctly decoded accounts' + ) + assert.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly 
decoded origin') + assert.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') + assert.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') + assert.ok(payload) }) - t.test('StorageRanges should encode/decode correctly', async (t) => { + it('StorageRanges should encode/decode correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -288,14 +283,14 @@ tape('[SnapProtocol]', (t) => { p.messages.filter((message) => message.name === 'StorageRanges')[0], data ) - t.ok(reqId === BigInt(1), 'correctly decoded reqId') - t.ok(slots.length === 1 && slots[0].length === 3, 'correctly decoded slots') + assert.ok(reqId === BigInt(1), 'correctly decoded reqId') + assert.ok(slots.length === 1 && slots[0].length === 3, 'correctly decoded slots') const { hash, body } = slots[0][2] - t.ok( + assert.ok( bytesToHex(hash) === '0x60264186ee63f748d340388f07b244d96d007fff5cbc397bbd69f8747c421f79', 'Slot 3 key' ) - t.ok(bytesToHex(body) === '0x8462b66ae7', 'Slot 3 value') + assert.ok(bytesToHex(body) === '0x8462b66ae7', 'Slot 3 value') const payload = RLP.encode( p.encode(p.messages.filter((message) => message.name === 'StorageRanges')[0], { @@ -304,11 +299,13 @@ tape('[SnapProtocol]', (t) => { proof, }) ) - t.ok(storageRangesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') - t.end() + assert.ok( + storageRangesRLP === bytesToHex(payload), + 'Re-encoded payload should match with original' + ) }) - t.test('StorageRanges should verify a real sample', async (t) => { + it('StorageRanges should verify a real sample', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -344,16 +341,15 @@ tape('[SnapProtocol]', (t) => { 
proof ) } catch (e) { - t.fail(`StorageRange proof verification failed with message=${(e as Error).message}`) + assert.fail(`StorageRange proof verification failed with message=${(e as Error).message}`) } - t.ok( + assert.ok( equalsBytes(keccak256(proof[0]), lastAccountStorageRoot), 'Proof should link to the accounts storageRoot' ) - t.end() }) - t.test('GetByteCodes should encode/decode correctly', async (t) => { + it('GetByteCodes should encode/decode correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -370,30 +366,29 @@ tape('[SnapProtocol]', (t) => { bytes, }) - t.ok( + assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), 'correctly encoded reqId' ) - t.ok(JSON.stringify(payload[1]) === JSON.stringify(hashes), 'correctly encoded hashes') - t.ok( + assert.ok(JSON.stringify(payload[1]) === JSON.stringify(hashes), 'correctly encoded hashes') + assert.ok( JSON.stringify(payload[2]) === JSON.stringify(bigIntToBytes(bytes)), 'correctly encoded bytes' ) - t.ok(payload) + assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetByteCodes')[0], payload ) - t.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') - t.ok(JSON.stringify(res.hashes) === JSON.stringify(hashes), 'correctly decoded hashes') - t.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') - t.ok(res) - t.end() + assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') + assert.ok(JSON.stringify(res.hashes) === JSON.stringify(hashes), 'correctly decoded hashes') + assert.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') + assert.ok(res) }) - t.test('ByteCodes should encode/decode correctly', async (t) => { + it('ByteCodes should encode/decode correctly', async () 
=> { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -404,8 +399,8 @@ tape('[SnapProtocol]', (t) => { codesRes ) - t.ok(reqId === BigInt(1), 'reqId should be 1') - t.ok(codes.length === 1, 'code should be present in response') + assert.ok(reqId === BigInt(1), 'reqId should be 1') + assert.ok(codes.length === 1, 'code should be present in response') const payload = RLP.encode( p.encode(p.messages.filter((message) => message.name === 'ByteCodes')[0], { @@ -413,11 +408,10 @@ tape('[SnapProtocol]', (t) => { codes, }) ) - t.ok(byteCodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') - t.end() + assert.ok(byteCodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') }) - t.test('ByteCodes should verify a real sample', async (t) => { + it('ByteCodes should verify a real sample', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -435,11 +429,10 @@ tape('[SnapProtocol]', (t) => { codesRes ) const code = codes[0] - t.ok(equalsBytes(keccak256(code), codeHash), 'Code should match the requested codeHash') - t.end() + assert.ok(equalsBytes(keccak256(code), codeHash), 'Code should match the requested codeHash') }) - t.test('GetTrieNodes should encode/decode correctly', async (t) => { + it('GetTrieNodes should encode/decode correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -456,32 +449,31 @@ tape('[SnapProtocol]', (t) => { bytes, }) - t.ok( + assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(reqId)), 'correctly encoded reqId' ) - t.ok(JSON.stringify(payload[1]) === 
JSON.stringify(root), 'correctly encoded root') - t.ok(JSON.stringify(payload[2]) === JSON.stringify(paths), 'correctly encoded paths') - t.ok( + assert.ok(JSON.stringify(payload[1]) === JSON.stringify(root), 'correctly encoded root') + assert.ok(JSON.stringify(payload[2]) === JSON.stringify(paths), 'correctly encoded paths') + assert.ok( JSON.stringify(payload[3]) === JSON.stringify(bigIntToBytes(bytes)), 'correctly encoded bytes' ) - t.ok(payload) + assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetTrieNodes')[0], payload ) - t.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') - t.ok(JSON.stringify(res.root) === JSON.stringify(root), 'correctly decoded root') - t.ok(JSON.stringify(res.paths) === JSON.stringify(paths), 'correctly decoded paths') - t.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') - t.ok(res) - t.end() + assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') + assert.ok(JSON.stringify(res.root) === JSON.stringify(root), 'correctly decoded root') + assert.ok(JSON.stringify(res.paths) === JSON.stringify(paths), 'correctly decoded paths') + assert.ok(JSON.stringify(res.bytes) === JSON.stringify(bytes), 'correctly decoded bytes') + assert.ok(res) }) - t.test('TrieNodes should encode/decode correctly with real sample', async (t) => { + it('TrieNodes should encode/decode correctly with real sample', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -492,14 +484,14 @@ tape('[SnapProtocol]', (t) => { nodesRes ) - t.ok(reqId === BigInt(1), 'reqId should be 1') - t.ok(nodes.length > 0, 'nodes should be present in response') + assert.ok(reqId === BigInt(1), 'reqId should be 1') + assert.ok(nodes.length > 0, 'nodes should be present in response') // check that raw node data 
that exists is valid for (let i = 0; i < nodes.length; i++) { const node: Uint8Array = nodes[i] if (node !== null) { - t.ok(decodeNode(node), 'raw node data should decode without error') + assert.ok(decodeNode(node), 'raw node data should decode without error') } } @@ -509,8 +501,7 @@ tape('[SnapProtocol]', (t) => { nodes, }) ) - t.ok(trieNodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') - t.end() + assert.ok(trieNodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') }) }) diff --git a/packages/client/test/net/server/rlpxserver.spec.ts b/packages/client/test/net/server/rlpxserver.spec.ts index 905eaaa3dd..f2fd57133e 100644 --- a/packages/client/test/net/server/rlpxserver.spec.ts +++ b/packages/client/test/net/server/rlpxserver.spec.ts @@ -1,13 +1,13 @@ import { equalsBytes, hexToBytes, utf8ToBytes } from '@ethereumjs/util' import { EventEmitter } from 'events' import { multiaddr } from 'multiaddr' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Config } from '../../../src/config' import { Event } from '../../../src/types' -tape('[RlpxServer]', async (t) => { +describe('[RlpxServer]', async () => { class RlpxPeer extends EventEmitter { accept(_: any, _2: any) {} getId() { @@ -44,24 +44,23 @@ tape('[RlpxServer]', async (t) => { RlpxPeer.prototype.accept(td.matchers.anything(), td.matchers.isA(RlpxServer)) ).thenResolve() - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new RlpxServer({ config, bootnodes: '10.0.0.1:1234,enode://abcd@10.0.0.2:1234', key: 'abcd', }) - t.equals(server.name, 'rlpx', 'get name') - t.ok(equalsBytes(server.key!, hexToBytes('0xabcd')), 'key parse') - t.deepEquals( + assert.equal(server.name, 'rlpx', 'get name') + assert.ok(equalsBytes(server.key!, 
hexToBytes('0xabcd')), 'key parse') + assert.deepEqual( server.bootnodes, [multiaddr('/ip4/10.0.0.1/tcp/1234'), multiaddr('/ip4/10.0.0.2/tcp/1234')], 'bootnodes split' ) - t.end() }) - t.test('should start/stop server', async (t) => { + it('should start/stop server', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new RlpxServer({ config, @@ -77,21 +76,22 @@ tape('[RlpxServer]', async (t) => { td.when( (server.dpt! as any).bootstrap({ address: '10.0.0.2', udpPort: '1234', tcpPort: '1234' }) ).thenReject(new Error('err0')) - server.config.events.on(Event.PEER_ERROR, (err) => t.equals(err.message, 'err0', 'got error')) + server.config.events.on(Event.PEER_ERROR, (err) => + assert.equal(err.message, 'err0', 'got error') + ) await server.start() td.verify((server as any).initDpt()) td.verify((server as any).initRlpx()) - t.ok(server.running, 'started') - t.notOk(await server.start(), 'already started') + assert.ok(server.running, 'started') + assert.notOk(await server.start(), 'already started') await server.stop() td.verify(server.dpt!.destroy()) td.verify(server.rlpx!.destroy()) - t.notOk(server.running, 'stopped') - t.notOk(await server.stop(), 'already stopped') - t.end() + assert.notOk(server.running, 'stopped') + assert.notOk(await server.stop(), 'already stopped') }) - t.test('should bootstrap with dns acquired peers', async (t) => { + it('should bootstrap with dns acquired peers', async () => { const dnsPeerInfo = { address: '10.0.0.5', udpPort: 1234, tcpPort: 1234 } const config = new Config({ transports: [], @@ -112,10 +112,9 @@ tape('[RlpxServer]', async (t) => { await server.bootstrap() td.verify(server.dpt!.bootstrap(dnsPeerInfo)) await server.stop() - t.end() }) - t.test('should return rlpx server info with ip4 as default', async (t) => { + it('should return rlpx server info with ip4 as default', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 
1000 }) const mockId = '0123' const server = new RlpxServer({ @@ -137,11 +136,11 @@ tape('[RlpxServer]', async (t) => { td.when( (server.dpt! as any).bootstrap({ address: '10.0.0.2', udpPort: '1234', tcpPort: '1234' }) ).thenReject(new Error('err0')) - config.events.on(Event.SERVER_ERROR, (err) => t.equals(err.message, 'err0', 'got error')) + config.events.on(Event.SERVER_ERROR, (err) => assert.equal(err.message, 'err0', 'got error')) await server.start() const nodeInfo = server.getRlpxInfo() - t.deepEqual( + assert.deepEqual( nodeInfo, { enode: `enode://${mockId}@0.0.0.0:30303`, @@ -153,10 +152,9 @@ tape('[RlpxServer]', async (t) => { 'get nodeInfo' ) await server.stop() - t.end() }) - t.test('should return rlpx server info with ip6', async (t) => { + it('should return rlpx server info with ip6', async () => { const config = new Config({ transports: [], accountCache: 10000, @@ -183,10 +181,10 @@ tape('[RlpxServer]', async (t) => { td.when( (server.dpt! as any).bootstrap({ address: '10.0.0.2', udpPort: '1234', tcpPort: '1234' }) ).thenReject(new Error('err0')) - config.events.on(Event.SERVER_ERROR, (err) => t.equals(err.message, 'err0', 'got error')) + config.events.on(Event.SERVER_ERROR, (err) => assert.equal(err.message, 'err0', 'got error')) await server.start() const nodeInfo = server.getRlpxInfo() - t.deepEqual( + assert.deepEqual( nodeInfo, { enode: `enode://${mockId}@[::]:30303`, @@ -198,53 +196,51 @@ tape('[RlpxServer]', async (t) => { 'get nodeInfo' ) await server.stop() - t.end() }) - t.test('should handle errors', (t) => { - t.plan(3) + it('should handle errors', () => { let count = 0 const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new RlpxServer({ config }) server.config.events.on(Event.SERVER_ERROR, (err) => { count = count + 1 - if (err.message === 'err0') t.pass('got server error - err0') - if (err.message === 'err1') t.pass('got peer error - err1') + if (err.message === 'err0') assert.ok(true, 
'got server error - err0') + if (err.message === 'err1') assert.ok(true, 'got peer error - err1') }) ;(server as any).error(new Error('EPIPE')) ;(server as any).error(new Error('err0')) setTimeout(() => { - t.equals(count, 2, 'ignored error') + assert.equal(count, 2, 'ignored error') }, 100) ;(server as any).error(new Error('err1')) }) - t.test('should ban peer', (t) => { + it('should ban peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new RlpxServer({ config }) - t.notOk(server.ban('123'), 'not started') + assert.notOk(server.ban('123'), 'not started') server.started = true server.dpt = td.object() server.rlpx = td.object() server.ban('112233', 1234) td.verify(server.dpt!.banPeer('112233', 1234)) - t.end() }) - t.test('should init dpt', (t) => { - t.plan(1) + it('should init dpt', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new RlpxServer({ config }) ;(server as any).initDpt().catch((error: Error) => { throw error }) - td.verify((server.dpt as any).bind(server.config.port, '0.0.0.0')) - config.events.on(Event.SERVER_ERROR, (err) => t.equals(err.message, 'err0', 'got error')) + config.events.on(Event.SERVER_ERROR, (err) => + it('should throw', async () => { + assert.equal(err.message, 'err0', 'got error') + }) + ) ;(server.dpt as any).emit('error', new Error('err0')) }) - t.test('should init rlpx', async (t) => { - t.plan(4) + it('should init rlpx', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new RlpxServer({ config }) const rlpxPeer = new RlpxPeer() @@ -253,13 +249,25 @@ tape('[RlpxServer]', async (t) => { ;(server as any).initRlpx().catch((error: Error) => { throw error }) - td.verify(RlpxPeer.capabilities(Array.from((server as any).protocols))) - td.verify(server.rlpx!.listen(server.config.port, '0.0.0.0')) - config.events.on(Event.PEER_CONNECTED, 
(peer) => t.ok(peer instanceof RlpxPeer, 'connected')) - config.events.on(Event.PEER_DISCONNECTED, (peer) => t.equals(peer.id, '01', 'disconnected')) - config.events.on(Event.SERVER_ERROR, (err) => t.equals(err.message, 'err0', 'got error')) + config.events.on(Event.PEER_CONNECTED, (peer) => + it('should connect', async () => { + assert.ok(peer instanceof RlpxPeer, 'connected') + }) + ) + config.events.on(Event.PEER_DISCONNECTED, (peer) => + it('should disconnect', async () => { + assert.equal(peer.id, '01', 'disconnected') + }) + ) + config.events.on(Event.SERVER_ERROR, (err) => + it('should throw error', async () => { + assert.equal(err.message, 'err0', 'got error') + }) + ) config.events.on(Event.SERVER_LISTENING, (info) => - t.deepEquals(info, { transport: 'rlpx', url: 'enode://ff@0.0.0.0:30303' }, 'listening') + it('should listen', async () => { + assert.deepEqual(info, { transport: 'rlpx', url: 'enode://ff@0.0.0.0:30303' }, 'listening') + }) ) server.rlpx!.emit('peer:added', rlpxPeer) ;(server as any).peers.set('01', { id: '01' } as any) @@ -271,8 +279,7 @@ tape('[RlpxServer]', async (t) => { server.rlpx!.emit('listening') }) - t.test('should handles errors from id-less peers', async (t) => { - t.plan(1) + it('should handles errors from id-less peers', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const server = new RlpxServer({ config }) const rlpxPeer = new RlpxPeer() @@ -281,12 +288,15 @@ tape('[RlpxServer]', async (t) => { ;(server as any).initRlpx().catch((error: Error) => { throw error }) - config.events.on(Event.SERVER_ERROR, (err) => t.equals(err.message, 'err0', 'got error')) + config.events.on(Event.SERVER_ERROR, (err) => + it('should throw', async () => { + assert.equal(err.message, 'err0', 'got error') + }) + ) server.rlpx!.emit('peer:error', rlpxPeer, new Error('err0')) }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git 
a/packages/client/test/rpc/admin/nodeInfo.spec.ts b/packages/client/test/rpc/admin/nodeInfo.spec.ts index 46cf04bfaa..b5ce289a81 100644 --- a/packages/client/test/rpc/admin/nodeInfo.spec.ts +++ b/packages/client/test/rpc/admin/nodeInfo.spec.ts @@ -1,22 +1,24 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' const method = 'admin_nodeInfo' -tape(method, async (t) => { - const manager = createManager(createClient({ opened: true })) - const server = startRPC(manager.getMethods()) +describe(method, () => { + it('works', async () => { + const manager = createManager(createClient({ opened: true })) + const server = startRPC(manager.getMethods()) - const req = params(method, []) + const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - if (result !== undefined) { - t.pass('admin_nodeInfo returns a value') - } else { - throw new Error('no return value') + const expectRes = (res: any) => { + const { result } = res.body + if (result !== undefined) { + assert.ok(true, 'admin_nodeInfo returns a value') + } else { + throw new Error('no return value') + } } - } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/debug/traceTransaction.spec.ts b/packages/client/test/rpc/debug/traceTransaction.spec.ts index ec8fa31d06..9b045bd1b2 100644 --- a/packages/client/test/rpc/debug/traceTransaction.spec.ts +++ b/packages/client/test/rpc/debug/traceTransaction.spec.ts @@ -1,175 +1,176 @@ import { Block } from '@ethereumjs/block' import { TransactionFactory } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INTERNAL_ERROR, INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as genesisJSON from 
'../../testdata/geth-genesis/debug.json' +import genesisJSON from '../../testdata/geth-genesis/debug.json' import { baseRequest, baseSetup, dummy, params, runBlockWithTxs, setupChain } from '../helpers' import { checkError } from '../util' const method = 'debug_traceTransaction' -tape(`${method}: call with invalid configuration`, async (t) => { - const { server } = baseSetup({ engine: false, includeVM: true }) +describe(method, () => { + it('call with invalid configuration', async () => { + const { server } = baseSetup({ engine: false, includeVM: true }) - const req = params(method, ['0xabcd', {}]) - const expectRes = checkError(t, INTERNAL_ERROR, 'missing receiptsManager') - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, ['0xabcd', {}]) + const expectRes = checkError(INTERNAL_ERROR, 'missing receiptsManager') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid parameters`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge') + it('call with invalid parameters', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge') - let req = params(method, ['abcd', {}]) - let expectRes = checkError(t, INVALID_PARAMS, 'hex string without 0x prefix') - await baseRequest(t, server, req, 200, expectRes, false) + let req = params(method, ['abcd', {}]) + let expectRes = checkError(INVALID_PARAMS, 'hex string without 0x prefix') + await baseRequest(server, req, 200, expectRes, false) - req = params(method, ['0xabcd', { enableReturnData: true }]) - expectRes = checkError(t, INVALID_PARAMS, 'enabling return data not implemented') - await baseRequest(t, server, req, 200, expectRes, false) + req = params(method, ['0xabcd', { enableReturnData: true }]) + expectRes = checkError(INVALID_PARAMS, 'enabling return data not implemented') + await baseRequest(server, req, 200, expectRes, false) - req = params(method, ['0xabcd', { tracerConfig: { some: 'value' } }]) - 
expectRes = checkError( - t, - INVALID_PARAMS, - 'custom tracers and tracer configurations are not implemented' - ) - await baseRequest(t, server, req, 200, expectRes, false) + req = params(method, ['0xabcd', { tracerConfig: { some: 'value' } }]) + expectRes = checkError( + INVALID_PARAMS, + 'custom tracers and tracer configurations are not implemented' + ) + await baseRequest(server, req, 200, expectRes, false) - req = params(method, ['0xabcd', { tracer: 'someTracer' }]) - expectRes = checkError(t, INVALID_PARAMS, 'custom tracers not implemented') - await baseRequest(t, server, req, 200, expectRes, false) + req = params(method, ['0xabcd', { tracer: 'someTracer' }]) + expectRes = checkError(INVALID_PARAMS, 'custom tracers not implemented') + await baseRequest(server, req, 200, expectRes, false) - req = params(method, ['0xabcd', { timeout: 1000 }]) - expectRes = checkError(t, INVALID_PARAMS, 'custom tracer timeouts not implemented') - await baseRequest(t, server, req, 200, expectRes, false) -}) - -tape(`${method}: call with valid parameters`, async (t) => { - const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { - txLookupLimit: 0, + req = params(method, ['0xabcd', { timeout: 1000 }]) + expectRes = checkError(INVALID_PARAMS, 'custom tracer timeouts not implemented') + await baseRequest(server, req, 200, expectRes, false) }) - // construct block with tx - const tx = TransactionFactory.fromTxData( - { - type: 0x2, - gasLimit: 0xfffff, - maxFeePerGas: 10, - maxPriorityFeePerGas: 1, - value: 10000, - data: '0x60AA', - }, - { common, freeze: false } - ).sign(dummy.privKey) - tx.getSenderAddress = () => { - return dummy.addr - } - const block = Block.fromBlockData({}, { common }) - block.transactions[0] = tx - await runBlockWithTxs(chain, execution, [tx], true) - - const req = params(method, [bytesToHex(tx.hash()), {}]) - const expectRes = (res: any) => { - t.equal(res.body.result.structLogs[0].op, 'PUSH1', 'produced a correct trace') 
- } - await baseRequest(t, server, req, 200, expectRes, true) -}) - -tape(`${method}: call with reverting code`, async (t) => { - const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { - txLookupLimit: 0, + it('call with valid parameters', async () => { + const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { + txLookupLimit: 0, + }) + + // construct block with tx + const tx = TransactionFactory.fromTxData( + { + type: 0x2, + gasLimit: 0xfffff, + maxFeePerGas: 10, + maxPriorityFeePerGas: 1, + value: 10000, + data: '0x60AA', + }, + { common, freeze: false } + ).sign(dummy.privKey) + tx.getSenderAddress = () => { + return dummy.addr + } + const block = Block.fromBlockData({}, { common }) + block.transactions[0] = tx + await runBlockWithTxs(chain, execution, [tx], true) + + const req = params(method, [bytesToHex(tx.hash()), {}]) + const expectRes = (res: any) => { + assert.equal(res.body.result.structLogs[0].op, 'PUSH1', 'produced a correct trace') + } + await baseRequest(server, req, 200, expectRes, true) }) - // construct block with tx - const tx = TransactionFactory.fromTxData( - { - type: 0x2, - gasLimit: 0xfffff, - maxFeePerGas: 10, - maxPriorityFeePerGas: 1, - value: 10000, - data: '0x560FAA', - }, - { common, freeze: false } - ).sign(dummy.privKey) - tx.getSenderAddress = () => { - return dummy.addr - } - const block = Block.fromBlockData({}, { common }) - block.transactions[0] = tx - await runBlockWithTxs(chain, execution, [tx], true) - - const req = params(method, [bytesToHex(tx.hash()), {}]) - const expectRes = (res: any) => { - t.equal(res.body.result.failed, true, 'returns error result with reverting code') - } - await baseRequest(t, server, req, 200, expectRes, true) -}) - -tape(`${method}: call with memory enabled`, async (t) => { - const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { - txLookupLimit: 0, + it('call with reverting code', async () 
=> { + const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { + txLookupLimit: 0, + }) + + // construct block with tx + const tx = TransactionFactory.fromTxData( + { + type: 0x2, + gasLimit: 0xfffff, + maxFeePerGas: 10, + maxPriorityFeePerGas: 1, + value: 10000, + data: '0x560FAA', + }, + { common, freeze: false } + ).sign(dummy.privKey) + tx.getSenderAddress = () => { + return dummy.addr + } + const block = Block.fromBlockData({}, { common }) + block.transactions[0] = tx + await runBlockWithTxs(chain, execution, [tx], true) + + const req = params(method, [bytesToHex(tx.hash()), {}]) + const expectRes = (res: any) => { + assert.equal(res.body.result.failed, true, 'returns error result with reverting code') + } + await baseRequest(server, req, 200, expectRes, true) }) - // construct block with tx - const tx = TransactionFactory.fromTxData( - { - type: 0x2, - gasLimit: 0xfffff, - maxFeePerGas: 10, - maxPriorityFeePerGas: 1, - value: 10000, - data: '0x604260005260206000F3', - }, - { common, freeze: false } - ).sign(dummy.privKey) - tx.getSenderAddress = () => { - return dummy.addr - } - const block = Block.fromBlockData({}, { common }) - block.transactions[0] = tx - await runBlockWithTxs(chain, execution, [tx], true) - - const req = params(method, [bytesToHex(tx.hash()), { enableMemory: true }]) - const expectRes = (res: any) => { - t.equal( - res.body.result.structLogs[5].memory[0], - '0x0000000000000000000000000000000000000000000000000000000000000042', - 'produced a trace with correct memory value returned' - ) - } - await baseRequest(t, server, req, 200, expectRes, true) -}) - -tape(`${method}: call with stack disabled`, async (t) => { - const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { - txLookupLimit: 0, + it('call with memory enabled', async () => { + const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { + txLookupLimit: 0, + }) + + // construct 
block with tx + const tx = TransactionFactory.fromTxData( + { + type: 0x2, + gasLimit: 0xfffff, + maxFeePerGas: 10, + maxPriorityFeePerGas: 1, + value: 10000, + data: '0x604260005260206000F3', + }, + { common, freeze: false } + ).sign(dummy.privKey) + tx.getSenderAddress = () => { + return dummy.addr + } + const block = Block.fromBlockData({}, { common }) + block.transactions[0] = tx + await runBlockWithTxs(chain, execution, [tx], true) + + const req = params(method, [bytesToHex(tx.hash()), { enableMemory: true }]) + const expectRes = (res: any) => { + assert.equal( + res.body.result.structLogs[5].memory[0], + '0x0000000000000000000000000000000000000000000000000000000000000042', + 'produced a trace with correct memory value returned' + ) + } + await baseRequest(server, req, 200, expectRes, true) }) - // construct block with tx - const tx = TransactionFactory.fromTxData( - { - type: 0x2, - gasLimit: 0xfffff, - maxFeePerGas: 10, - maxPriorityFeePerGas: 1, - value: 10000, - data: '0x600F6000', - }, - { common, freeze: false } - ).sign(dummy.privKey) - tx.getSenderAddress = () => { - return dummy.addr - } - const block = Block.fromBlockData({}, { common }) - block.transactions[0] = tx - await runBlockWithTxs(chain, execution, [tx], true) - - const req = params(method, [bytesToHex(tx.hash()), { disableStack: true }]) - const expectRes = (res: any) => { - t.ok(res.body.result.structLogs[1].stack === undefined, 'returns no stack with trace') - } - await baseRequest(t, server, req, 200, expectRes, true) + it('call with stack disabled', async () => { + const { chain, common, execution, server } = await setupChain(genesisJSON, 'post-merge', { + txLookupLimit: 0, + }) + + // construct block with tx + const tx = TransactionFactory.fromTxData( + { + type: 0x2, + gasLimit: 0xfffff, + maxFeePerGas: 10, + maxPriorityFeePerGas: 1, + value: 10000, + data: '0x600F6000', + }, + { common, freeze: false } + ).sign(dummy.privKey) + tx.getSenderAddress = () => { + return dummy.addr + } + 
const block = Block.fromBlockData({}, { common }) + block.transactions[0] = tx + await runBlockWithTxs(chain, execution, [tx], true) + + const req = params(method, [bytesToHex(tx.hash()), { disableStack: true }]) + const expectRes = (res: any) => { + assert.ok(res.body.result.structLogs[1].stack === undefined, 'returns no stack with trace') + } + await baseRequest(server, req, 200, expectRes, true) + }) }) diff --git a/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts b/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts index d1921d7f0b..2763105c4e 100644 --- a/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts +++ b/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts @@ -1,20 +1,22 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, baseSetup, params } from '../helpers' const method = 'engine_exchangeCapabilities' -tape(`${method}: call with invalid payloadId`, async (t) => { - const { server } = baseSetup({ engine: true }) +describe(method, () => { + it('call with invalid payloadId', async () => { + const { server } = baseSetup({ engine: true }) - const req = params(method, []) - const expectRes = (res: any) => { - t.ok(res.body.result.length > 0, 'got more than 1 engine capability') - t.equal( - res.body.result.findIndex((el: string) => el === 'engine_exchangeCapabilities'), - -1, - 'should not include engine_exchangeCapabilities in response' - ) - } - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, []) + const expectRes = (res: any) => { + assert.ok(res.body.result.length > 0, 'got more than 1 engine capability') + assert.equal( + res.body.result.findIndex((el: string) => el === 'engine_exchangeCapabilities'), + -1, + 'should not include engine_exchangeCapabilities in response' + ) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/engine/exchangeTransitionConfigurationV1.spec.ts 
b/packages/client/test/rpc/engine/exchangeTransitionConfigurationV1.spec.ts index e3426dc169..7bf77b8afd 100644 --- a/packages/client/test/rpc/engine/exchangeTransitionConfigurationV1.spec.ts +++ b/packages/client/test/rpc/engine/exchangeTransitionConfigurationV1.spec.ts @@ -1,7 +1,7 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as genesisJSON from '../../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, params, setupChain } from '../helpers' import { checkError } from '../util' @@ -19,20 +19,22 @@ const invalidConfig = { terminalBlockNumber: '0x0', } -tape(`${method}: call with valid config`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) +describe(method, () => { + it('call with valid config', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const req = params(method, [validConfig]) - const expectRes = (res: any) => { - t.deepEqual(res.body.result, validConfig) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, [validConfig]) + const expectRes = (res: any) => { + assert.deepEqual(res.body.result, validConfig) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid config`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + it('call with invalid config', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const req = params(method, [invalidConfig]) - const expectRes = checkError(t, INVALID_PARAMS, 'terminalTotalDifficulty set to 0, received 256') - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, [invalidConfig]) + const expectRes = checkError(INVALID_PARAMS, 'terminalTotalDifficulty 
set to 0, received 256') + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts b/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts index 78f06a95ca..267623109d 100644 --- a/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts +++ b/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts @@ -1,17 +1,17 @@ import { Block, BlockHeader } from '@ethereumjs/block' import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { bytesToHex, bytesToUnprefixedHex, zeros } from '@ethereumjs/util' -import * as tape from 'tape' +import { bytesToHex, zeros } from '@ethereumjs/util' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { blockToExecutionPayload } from '../../../src/rpc/modules' -import * as blocks from '../../testdata/blocks/beacon.json' -import * as genesisJSON from '../../testdata/geth-genesis/post-merge.json' +import blocks from '../../testdata/blocks/beacon.json' +import genesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' -import { batchBlocks } from './newPayloadV1.spec' +import { batchBlocks } from './newPayloadV1.spec.js' const crypto = require('crypto') @@ -53,369 +53,370 @@ function createBlock(parentBlock: Block) { export const validPayload = [validForkChoiceState, validPayloadAttributes] -tape(`${method}: call with invalid head block hash without 0x`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) - const invalidForkChoiceState = { - ...validForkChoiceState, - headBlockHash: 'invalid formatted head block hash', - } - const req = params(method, [invalidForkChoiceState, validPayloadAttributes]) - const expectRes = checkError( - t, - INVALID_PARAMS, - "invalid argument 0 for key 'headBlockHash': hex string 
without 0x prefix" - ) - await baseRequest(t, server, req, 200, expectRes) -}) +describe(method, () => { + it('call with invalid head block hash without 0x', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) + const invalidForkChoiceState = { + ...validForkChoiceState, + headBlockHash: 'invalid formatted head block hash', + } + const req = params(method, [invalidForkChoiceState, validPayloadAttributes]) + const expectRes = checkError( + INVALID_PARAMS, + "invalid argument 0 for key 'headBlockHash': hex string without 0x prefix" + ) + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid hex string as block hash`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) - - const invalidForkChoiceState = { - ...validForkChoiceState, - finalizedBlockHash: '0xinvalid', - } - const req = params(method, [invalidForkChoiceState, validPayloadAttributes]) - const expectRes = checkError( - t, - INVALID_PARAMS, - "invalid argument 0 for key 'finalizedBlockHash': invalid block hash" - ) - await baseRequest(t, server, req, 200, expectRes) -}) + it('call with invalid hex string as block hash', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) -tape(`${method}: call with valid data but parent block is not loaded yet`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - - const nonExistentHeadBlockHash = { - ...validForkChoiceState, - headBlockHash: '0x1d93f244823f80efbd9292a0d0d72a2b03df8cd5a9688c6c3779d26a7cc5009c', - } - const req = params(method, [nonExistentHeadBlockHash, validPayloadAttributes]) - const expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'SYNCING') - t.equal(res.body.result.payloadStatus.latestValidHash, null) - t.equal(res.body.result.payloadStatus.validationError, null) - t.equal(res.body.result.payloadId, null) - } - await baseRequest(t, server, req, 200, expectRes) 
-}) + const invalidForkChoiceState = { + ...validForkChoiceState, + finalizedBlockHash: '0xinvalid', + } + const req = params(method, [invalidForkChoiceState, validPayloadAttributes]) + const expectRes = checkError( + INVALID_PARAMS, + "invalid argument 0 for key 'finalizedBlockHash': invalid block hash" + ) + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with valid data and synced data`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + it('call with valid data but parent block is not loaded yet', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const req = params(method, validPayload) - const expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - t.equal( - res.body.result.payloadStatus.latestValidHash, - '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a' - ) - t.equal(res.body.result.payloadStatus.validationError, null) - t.notEqual(res.body.result.payloadId, null) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const nonExistentHeadBlockHash = { + ...validForkChoiceState, + headBlockHash: '0x1d93f244823f80efbd9292a0d0d72a2b03df8cd5a9688c6c3779d26a7cc5009c', + } + const req = params(method, [nonExistentHeadBlockHash, validPayloadAttributes]) + const expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'SYNCING') + assert.equal(res.body.result.payloadStatus.latestValidHash, null) + assert.equal(res.body.result.payloadStatus.validationError, null) + assert.equal(res.body.result.payloadId, null) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid timestamp payloadAttributes`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + it('call with valid data and synced data', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: 
true }) - const invalidTimestampPayload: any = [{ ...validPayload[0] }, { ...validPayload[1] }] - invalidTimestampPayload[1].timestamp = '0x0' + const req = params(method, validPayload) + const expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + assert.equal( + res.body.result.payloadStatus.latestValidHash, + '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a' + ) + assert.equal(res.body.result.payloadStatus.validationError, null) + assert.notEqual(res.body.result.payloadId, null) + } + await baseRequest(server, req, 200, expectRes) + }) - const req = params(method, invalidTimestampPayload) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid timestamp in payloadAttributes, got 0, need at least 1' - ) - await baseRequest(t, server, req, 200, expectRes) -}) + it('call with invalid timestamp payloadAttributes', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) -tape(`${method}: call with valid fork choice state without payload attributes`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const req = params(method, [validForkChoiceState]) - const expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - t.equal(res.body.result.payloadStatus.latestValidHash, validForkChoiceState.headBlockHash) - t.equal(res.body.result.payloadStatus.validationError, null) - t.equal(res.body.result.payloadId, null) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const invalidTimestampPayload: any = [{ ...validPayload[0] }, { ...validPayload[1] }] + invalidTimestampPayload[1].timestamp = '0x0' -tape(`${method}: invalid terminal block with only genesis block`, async (t) => { - const genesisWithHigherTtd = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 17179869185, - }, - } + const req = params(method, invalidTimestampPayload) + const expectRes = 
checkError( + INVALID_PARAMS, + 'invalid timestamp in payloadAttributes, got 0, need at least 1' + ) + await baseRequest(server, req, 200, expectRes) + }) - ;(BlockHeader as any).prototype._consensusFormatValidation = td.func() - const { server } = await setupChain(genesisWithHigherTtd, 'post-merge', { - engine: true, + it('call with valid fork choice state without payload attributes', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + const req = params(method, [validForkChoiceState]) + const expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + assert.equal( + res.body.result.payloadStatus.latestValidHash, + validForkChoiceState.headBlockHash + ) + assert.equal(res.body.result.payloadStatus.validationError, null) + assert.equal(res.body.result.payloadId, null) + } + await baseRequest(server, req, 200, expectRes) }) - const req = params(method, [validForkChoiceState, null]) - const expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'INVALID') - t.equal(res.body.result.payloadStatus.latestValidHash, bytesToUnprefixedHex(zeros(32))) - } - await baseRequest(t, server, req, 200, expectRes) -}) + it('invalid terminal block with only genesis block', async () => { + const genesisWithHigherTtd = { + ...genesisJSON, + config: { + ...genesisJSON.config, + terminalTotalDifficulty: 17179869185, + }, + } -tape(`${method}: invalid terminal block with 1+ blocks`, async (t) => { - const genesisWithHigherTtd = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 17179869185, - clique: undefined, - ethash: {}, - }, - } + BlockHeader.prototype['_consensusFormatValidation'] = td.func() + const { server } = await setupChain(genesisWithHigherTtd, 'post-merge', { + engine: true, + }) - const { server, chain, common } = await setupChain(genesisWithHigherTtd, 'post-merge', { - engine: true, + const req = params(method, [validForkChoiceState, null]) 
+ const expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'INVALID') + assert.equal(res.body.result.payloadStatus.latestValidHash, bytesToHex(zeros(32))) + } + await baseRequest(server, req, 200, expectRes) }) - const newBlock = Block.fromBlockData( - { - header: { - number: blocks[0].blockNumber, - parentHash: blocks[0].parentHash, - difficulty: 1, - extraData: new Uint8Array(97), + it('invalid terminal block with 1+ blocks', async () => { + const genesisWithHigherTtd = { + ...genesisJSON, + config: { + ...genesisJSON.config, + terminalTotalDifficulty: 17179869185, + clique: undefined, + ethash: {}, }, - }, - { common } - ) + } - await chain.putBlocks([newBlock]) - const req = params(method, [ - { ...validForkChoiceState, headBlockHash: bytesToHex(newBlock.hash()) }, - null, - ]) - const expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'INVALID') - t.equal(res.body.result.payloadStatus.latestValidHash, bytesToUnprefixedHex(zeros(32))) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const { server, chain, common } = await setupChain(genesisWithHigherTtd, 'post-merge', { + engine: true, + }) + + BlockHeader.prototype['_consensusFormatValidation'] = td.func() + const newBlock = Block.fromBlockData( + { + header: { + number: blocks[0].blockNumber, + parentHash: blocks[0].parentHash, + difficulty: 1, + extraData: new Uint8Array(97), + }, + }, + { common, skipConsensusFormatValidation: true } + ) -tape(`${method}: call with deep parent lookup`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + await chain.putBlocks([newBlock]) + const req = params(method, [ + { ...validForkChoiceState, headBlockHash: bytesToHex(newBlock.hash()) }, + null, + ]) + const expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'INVALID') + assert.equal(res.body.result.payloadStatus.latestValidHash, bytesToHex(zeros(32))) + } + await 
baseRequest(server, req, 200, expectRes) + }) - let req = params(method, [validForkChoiceState]) - let expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes, false, false) + it('call with deep parent lookup', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - for (let i = 0; i < 3; i++) { - const req = params('engine_newPayloadV1', [blocks[i]]) - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + let req = params(method, [validForkChoiceState]) + let expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes, false, false) + + for (let i = 0; i < 3; i++) { + const req = params('engine_newPayloadV1', [blocks[i]]) + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes, false, false) } - await baseRequest(t, server, req, 200, expectRes, false, false) - } - - // Now set the head to the last hash - req = params(method, [{ ...validForkChoiceState, headBlockHash: blocks[2].blockHash }]) - expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes) -}) -tape(`${method}: call with deep parent lookup and with stored safe block hash`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + // Now set the head to the last hash + req = params(method, [{ ...validForkChoiceState, headBlockHash: blocks[2].blockHash }]) + expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes) + }) - let req = params(method, [validForkChoiceState]) - let expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - await baseRequest(t, 
server, req, 200, expectRes, false, false) + it('call with deep parent lookup and with stored safe block hash', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - await batchBlocks(t, server) + let req = params(method, [validForkChoiceState]) + let expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes, false, false) - req = params(method, [ - { - ...validForkChoiceState, - headBlockHash: blocks[2].blockHash, - safeBlockHash: blocks[0].blockHash, - }, - ]) - expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes) -}) + await batchBlocks(server) -tape(`${method}: unknown finalized block hash`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const req = params(method, [ - { - ...validForkChoiceState, - finalizedBlockHash: '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4b', - }, - ]) - const expectRes = checkError(t, INVALID_PARAMS, 'finalized block not available') - await baseRequest(t, server, req, 200, expectRes) -}) + req = params(method, [ + { + ...validForkChoiceState, + headBlockHash: blocks[2].blockHash, + safeBlockHash: blocks[0].blockHash, + }, + ]) + expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: invalid safe block hash`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const req = params(method, [ - { - ...validForkChoiceState, - safeBlockHash: '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4b', - }, - ]) - const expectRes = checkError(t, INVALID_PARAMS, 'safe block not available') + it('unknown finalized block hash', async () => { + const { server } = await setupChain(genesisJSON, 
'post-merge', { engine: true }) + const req = params(method, [ + { + ...validForkChoiceState, + finalizedBlockHash: '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4b', + }, + ]) + const expectRes = checkError(INVALID_PARAMS, 'finalized block not available') + await baseRequest(server, req, 200, expectRes) + }) - await baseRequest(t, server, req, 200, expectRes) -}) + it('invalid safe block hash', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + const req = params(method, [ + { + ...validForkChoiceState, + safeBlockHash: '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4b', + }, + ]) + const expectRes = checkError(INVALID_PARAMS, 'safe block not available') -tape(`${method}: latest block after reorg`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - let req = params(method, [validForkChoiceState]) - let expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes) + }) - await batchBlocks(t, server) + it('latest block after reorg', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + let req = params(method, [validForkChoiceState]) + let expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes, false, false) - req = params(method, [ - { - ...validForkChoiceState, - headBlockHash: blocks[2].blockHash, - safeBlockHash: blocks[0].blockHash, - }, - ]) - expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - // check safe and finalized - req = params('eth_getBlockByNumber', ['finalized', false]) - expectRes = (res: any) => { - t.equal(res.body.result.number, '0x0', 
'finalized should be set to genesis') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - req = params('eth_getBlockByNumber', ['safe', false]) - expectRes = (res: any) => { - t.equal(res.body.result.number, '0x1', 'safe should be set to first block') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - req = params(method, [ - { - headBlockHash: blocks[1].blockHash, - safeBlockHash: blocks[2].blockHash, - finalizedBlockHash: blocks[2].blockHash, - }, - ]) + await batchBlocks(server) - expectRes = (res: any) => { - t.equal(res.body.error.code, -32602) - } - await baseRequest(t, server, req, 200, expectRes) -}) + req = params(method, [ + { + ...validForkChoiceState, + headBlockHash: blocks[2].blockHash, + safeBlockHash: blocks[0].blockHash, + }, + ]) + expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes, false, false) + + // check safe and finalized + req = params('eth_getBlockByNumber', ['finalized', false]) + expectRes = (res: any) => { + assert.equal(res.body.result.number, '0x0', 'finalized should be set to genesis') + } + await baseRequest(server, req, 200, expectRes, false, false) + + req = params('eth_getBlockByNumber', ['safe', false]) + expectRes = (res: any) => { + assert.equal(res.body.result.number, '0x1', 'safe should be set to first block') + } + await baseRequest(server, req, 200, expectRes, false, false) -tape(`${method}: validate safeBlockHash is part of canonical chain`, async (t) => { - const { server, chain } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + req = params(method, [ + { + headBlockHash: blocks[1].blockHash, + safeBlockHash: blocks[2].blockHash, + finalizedBlockHash: blocks[2].blockHash, + }, + ]) - const genesis = await chain.getBlock(BigInt(0)) + expectRes = (res: any) => { + assert.equal(res.body.error.code, -32602) + } + await baseRequest(server, req, 200, expectRes) + }) - // Build 
the payload for the canonical chain - const canonical = [genesis] + it('validate safeBlockHash is part of canonical chain', async () => { + const { server, chain } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - for (let i = 0; i < 2; i++) { - canonical.push(createBlock(canonical[canonical.length - 1])) - } + const genesis = await chain.getBlock(BigInt(0)) - // Build an alternative payload - const reorg = [genesis] - for (let i = 0; i < 2; i++) { - reorg.push(createBlock(reorg[reorg.length - 1])) - } + // Build the payload for the canonical chain + const canonical = [genesis] - const canonicalPayload = canonical.map( - (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload - ) - const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) + for (let i = 0; i < 2; i++) { + canonical.push(createBlock(canonical[canonical.length - 1])) + } - await batchBlocks(t, server, canonicalPayload.slice(1)) - await batchBlocks(t, server, reorgPayload.slice(1)) + // Build an alternative payload + const reorg = [genesis] + for (let i = 0; i < 2; i++) { + reorg.push(createBlock(reorg[reorg.length - 1])) + } - // Safe block hash is not in the canonical chain - const req = params(method, [ - { - headBlockHash: reorgPayload[2].blockHash, - safeBlockHash: canonicalPayload[1].blockHash, - finalizedBlockHash: reorgPayload[1].blockHash, - }, - ]) - - const expectRes = (res: any) => { - t.equal(res.body.error.code, -32602) - t.ok(res.body.error.message.includes('safeBlock')) - t.ok(res.body.error.message.includes('canonical')) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const canonicalPayload = canonical.map( + (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload + ) + const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) -tape(`${method}: validate finalizedBlockHash is part of canonical chain`, async (t) => { - const { server, chain } = await setupChain(genesisJSON, 
'post-merge', { engine: true }) + await batchBlocks(server, canonicalPayload.slice(1)) + await batchBlocks(server, reorgPayload.slice(1)) - const genesis = await chain.getBlock(BigInt(0)) + // Safe block hash is not in the canonical chain + const req = params(method, [ + { + headBlockHash: reorgPayload[2].blockHash, + safeBlockHash: canonicalPayload[1].blockHash, + finalizedBlockHash: reorgPayload[1].blockHash, + }, + ]) - // Build the payload for the canonical chain - const canonical = [genesis] + const expectRes = (res: any) => { + assert.equal(res.body.error.code, -32602) + assert.ok(res.body.error.message.includes('safeBlock')) + assert.ok(res.body.error.message.includes('canonical')) + } + await baseRequest(server, req, 200, expectRes) + }) - for (let i = 0; i < 2; i++) { - canonical.push(createBlock(canonical[canonical.length - 1])) - } + it('validate finalizedBlockHash is part of canonical chain', async () => { + const { server, chain } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - // Build an alternative payload - const reorg = [genesis] - for (let i = 0; i < 2; i++) { - reorg.push(createBlock(reorg[reorg.length - 1])) - } + const genesis = await chain.getBlock(BigInt(0)) - const canonicalPayload = canonical.map( - (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload - ) - const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) + // Build the payload for the canonical chain + const canonical = [genesis] - await batchBlocks(t, server, canonicalPayload.slice(1)) - await batchBlocks(t, server, reorgPayload.slice(1)) + for (let i = 0; i < 2; i++) { + canonical.push(createBlock(canonical[canonical.length - 1])) + } - // Finalized block hash is not in the canonical chain - const req = params(method, [ - { - headBlockHash: reorgPayload[2].blockHash, - safeBlockHash: reorgPayload[1].blockHash, - finalizedBlockHash: canonicalPayload[1].blockHash, - }, - ]) - - const expectRes = (res: any) => { - 
t.equal(res.body.error.code, -32602) - t.ok(res.body.error.message.includes('finalizedBlock')) - t.ok(res.body.error.message.includes('canonical')) - } - await baseRequest(t, server, req, 200, expectRes) -}) + // Build an alternative payload + const reorg = [genesis] + for (let i = 0; i < 2; i++) { + reorg.push(createBlock(reorg[reorg.length - 1])) + } + + const canonicalPayload = canonical.map( + (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload + ) + const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) -tape('reset TD', (t) => { - td.reset() - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate - t.end() + await batchBlocks(server, canonicalPayload.slice(1)) + await batchBlocks(server, reorgPayload.slice(1)) + + // Finalized block hash is not in the canonical chain + const req = params(method, [ + { + headBlockHash: reorgPayload[2].blockHash, + safeBlockHash: reorgPayload[1].blockHash, + finalizedBlockHash: canonicalPayload[1].blockHash, + }, + ]) + + const expectRes = (res: any) => { + assert.equal(res.body.error.code, -32602) + assert.ok(res.body.error.message.includes('finalizedBlock')) + assert.ok(res.body.error.message.includes('canonical')) + } + await baseRequest(server, req, 200, expectRes) + }) + it('reset TD', () => { + td.reset() + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate + }) }) diff --git a/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts b/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts index eed88c9720..6a488499d6 100644 --- a/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts @@ -3,193 +3,198 @@ import { Hardfork } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' import { TransactionFactory } from '@ethereumjs/tx' import { Account, Address, bytesToHex, hexToBytes, randomBytes } from 
'@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { TOO_LARGE_REQUEST } from '../../../src/rpc/error-code' -import * as genesisJSON from '../../testdata/geth-genesis/eip4844.json' -import * as preShanghaiGenesisJson from '../../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../../testdata/geth-genesis/eip4844.json' +import preShanghaiGenesisJson from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' const method = 'engine_getPayloadBodiesByHashV1' -tape(`${method}: call with too many hashes`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) - const tooManyHashes: string[] = [] - for (let x = 0; x < 35; x++) { - tooManyHashes.push(bytesToHex(randomBytes(32))) - } - const req = params(method, [tooManyHashes]) - const expectRes = checkError( - t, - TOO_LARGE_REQUEST, - 'More than 32 execution payload bodies requested' - ) - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with valid parameters`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.setStateRoot = function (): any {} - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - const { chain, service, server, common } = await setupChain(genesisJSON, 'post-merge', { - engine: true, - hardfork: Hardfork.Cancun, +describe(method, () => { + it('call with too many hashes', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) + const tooManyHashes: string[] = [] + for (let x = 0; x < 35; x++) { + tooManyHashes.push(bytesToHex(randomBytes(32))) + } + const req = params(method, [tooManyHashes]) + const 
expectRes = checkError( + TOO_LARGE_REQUEST, + 'More than 32 execution payload bodies requested' + ) + await baseRequest(server, req, 200, expectRes) }) - common.setHardfork(Hardfork.Cancun) - const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) - await service.execution.vm.stateManager.putAccount(address, new Account()) - const account = await service.execution.vm.stateManager.getAccount(address) - - account!.balance = 0xfffffffffffffffn - await service.execution.vm.stateManager.putAccount(address, account!) - const tx = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - }, - { common } - ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - nonce: 1n, - }, - { common } - ).sign(pkey) - const block = Block.fromBlockData( - { - transactions: [tx], - header: BlockHeader.fromHeaderData( - { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) - const block2 = Block.fromBlockData( - { - transactions: [tx2], - header: BlockHeader.fromHeaderData( - { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) - await chain.putBlocks([block, block2], true) + it('call with valid parameters', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.setStateRoot = function (): any {} + 
DefaultStateManager.prototype.shallowCopy = function () { + return this + } + const { chain, service, server, common } = await setupChain(genesisJSON, 'post-merge', { + engine: true, + hardfork: Hardfork.Cancun, + }) + common.setHardfork(Hardfork.Cancun) + const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') + const address = Address.fromPrivateKey(pkey) + await service.execution.vm.stateManager.putAccount(address, new Account()) + const account = await service.execution.vm.stateManager.getAccount(address) - const req = params(method, [ - [bytesToHex(block.hash()), bytesToHex(randomBytes(32)), bytesToHex(block2.hash())], - ]) - const expectRes = (res: any) => { - t.equal( - res.body.result[0].transactions[0], - bytesToHex(tx.serialize()), - 'got expected transaction from first payload' + account!.balance = 0xfffffffffffffffn + await service.execution.vm.stateManager.putAccount(address, account!) + const tx = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + }, + { common } + ).sign(pkey) + const tx2 = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + nonce: 1n, + }, + { common } + ).sign(pkey) + const block = Block.fromBlockData( + { + transactions: [tx], + header: BlockHeader.fromHeaderData( + { parentHash: chain.genesis.hash(), number: 1n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } + ) + const block2 = Block.fromBlockData( + { + transactions: [tx2], + header: BlockHeader.fromHeaderData( + { parentHash: block.hash(), number: 2n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } ) - t.equal(res.body.result[1], null, 'got null for block not found in chain') - 
t.equal(res.body.result.length, 3, 'length of response matches number of block hashes sent') - } - await baseRequest(t, server, req, 200, expectRes) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy -}) -tape(`${method}: call with valid parameters on pre-Shanghai block`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.setStateRoot = function (): any {} - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - const { chain, service, server, common } = await setupChain( - preShanghaiGenesisJson, - 'post-merge', - { - engine: true, - hardfork: Hardfork.London, - } - ) - common.setHardfork(Hardfork.London) - const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) - await service.execution.vm.stateManager.putAccount(address, new Account()) - const account = await service.execution.vm.stateManager.getAccount(address) + await chain.putBlocks([block, block2], true) - account!.balance = 0xfffffffffffffffn - await service.execution.vm.stateManager.putAccount(address, account!) 
- const tx = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - }, - { common } - ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - nonce: 1n, - }, - { common } - ).sign(pkey) - const block = Block.fromBlockData( - { - transactions: [tx], - header: BlockHeader.fromHeaderData( - { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) - const block2 = Block.fromBlockData( - { - transactions: [tx2], - header: BlockHeader.fromHeaderData( - { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) + const req = params(method, [ + [bytesToHex(block.hash()), bytesToHex(randomBytes(32)), bytesToHex(block2.hash())], + ]) + const expectRes = (res: any) => { + assert.equal( + res.body.result[0].transactions[0], + bytesToHex(tx.serialize()), + 'got expected transaction from first payload' + ) + assert.equal(res.body.result[1], null, 'got null for block not found in chain') + assert.equal( + res.body.result.length, + 3, + 'length of response matches number of block hashes sent' + ) + } + await baseRequest(server, req, 200, expectRes) + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + }) - await chain.putBlocks([block, block2], true) + it('call with valid parameters on pre-Shanghai block', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + const originalStateManagerCopy = 
DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.setStateRoot = function (): any {} + DefaultStateManager.prototype.shallowCopy = function () { + return this + } + const { chain, service, server, common } = await setupChain( + preShanghaiGenesisJson, + 'post-merge', + { + engine: true, + hardfork: Hardfork.London, + } + ) + common.setHardfork(Hardfork.London) + const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') + const address = Address.fromPrivateKey(pkey) + await service.execution.vm.stateManager.putAccount(address, new Account()) + const account = await service.execution.vm.stateManager.getAccount(address) - const req = params(method, [ - [bytesToHex(block.hash()), bytesToHex(randomBytes(32)), bytesToHex(block2.hash())], - ]) - const expectRes = (res: any) => { - t.equal( - res.body.result[0].withdrawals, - null, - 'got null for withdrawals field on pre-Shanghai block' + account!.balance = 0xfffffffffffffffn + await service.execution.vm.stateManager.putAccount(address, account!) 
+ const tx = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + }, + { common } + ).sign(pkey) + const tx2 = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + nonce: 1n, + }, + { common } + ).sign(pkey) + const block = Block.fromBlockData( + { + transactions: [tx], + header: BlockHeader.fromHeaderData( + { parentHash: chain.genesis.hash(), number: 1n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } ) - } - await baseRequest(t, server, req, 200, expectRes) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + const block2 = Block.fromBlockData( + { + transactions: [tx2], + header: BlockHeader.fromHeaderData( + { parentHash: block.hash(), number: 2n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } + ) + + await chain.putBlocks([block, block2], true) + + const req = params(method, [ + [bytesToHex(block.hash()), bytesToHex(randomBytes(32)), bytesToHex(block2.hash())], + ]) + const expectRes = (res: any) => { + assert.equal( + res.body.result[0].withdrawals, + null, + 'got null for withdrawals field on pre-Shanghai block' + ) + } + await baseRequest(server, req, 200, expectRes) + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + }) }) diff --git a/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts b/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts index 67956b1a4a..c71ed0e9b0 100644 --- a/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts +++ 
b/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts @@ -3,208 +3,205 @@ import { Hardfork } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' import { TransactionFactory } from '@ethereumjs/tx' import { Account, Address, bytesToHex, hexToBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS, TOO_LARGE_REQUEST } from '../../../src/rpc/error-code' -import * as genesisJSON from '../../testdata/geth-genesis/eip4844.json' -import * as preShanghaiGenesisJSON from '../../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../../testdata/geth-genesis/eip4844.json' +import preShanghaiGenesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' const method = 'engine_getPayloadBodiesByRangeV1' -tape(`${method}: call with too many hashes`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) +describe(method, () => { + it('call with too many hashes', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) - const req = params(method, ['0x1', '0x55']) - const expectRes = checkError( - t, - TOO_LARGE_REQUEST, - 'More than 32 execution payload bodies requested' - ) - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, ['0x1', '0x55']) + const expectRes = checkError( + TOO_LARGE_REQUEST, + 'More than 32 execution payload bodies requested' + ) + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid parameters`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) + it('call with invalid parameters', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) - const req = params(method, ['0x0', '0x0']) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'Start 
and Count parameters cannot be less than 1' - ) - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with valid parameters`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.setStateRoot = function (): any {} - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - const { chain, service, server, common } = await setupChain(genesisJSON, 'post-merge', { - engine: true, - hardfork: Hardfork.Cancun, + const req = params(method, ['0x0', '0x0']) + const expectRes = checkError(INVALID_PARAMS, 'Start and Count parameters cannot be less than 1') + await baseRequest(server, req, 200, expectRes) }) - common.setHardfork(Hardfork.Cancun) - const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) - await service.execution.vm.stateManager.putAccount(address, new Account()) - const account = await service.execution.vm.stateManager.getAccount(address) - - account!.balance = 0xfffffffffffffffn - await service.execution.vm.stateManager.putAccount(address, account!) 
- const tx = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - }, - { common } - ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - nonce: 1n, - }, - { common } - ).sign(pkey) - const block = Block.fromBlockData( - { - transactions: [tx], - header: BlockHeader.fromHeaderData( - { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) - const block2 = Block.fromBlockData( - { - transactions: [tx2], - header: BlockHeader.fromHeaderData( - { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) - await chain.putBlocks([block, block2], true) + it('call with valid parameters', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.setStateRoot = function (): any {} + DefaultStateManager.prototype.shallowCopy = function () { + return this + } + const { chain, service, server, common } = await setupChain(genesisJSON, 'post-merge', { + engine: true, + hardfork: Hardfork.Cancun, + }) + common.setHardfork(Hardfork.Cancun) + const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') + const address = Address.fromPrivateKey(pkey) + await service.execution.vm.stateManager.putAccount(address, new Account()) + const account = await service.execution.vm.stateManager.getAccount(address) - const req = params(method, ['0x1', '0x4']) - const expectRes = (res: any) => { - 
t.equal( - res.body.result[0].transactions[0], - bytesToHex(tx.serialize()), - 'got expected transaction from first payload' + account!.balance = 0xfffffffffffffffn + await service.execution.vm.stateManager.putAccount(address, account!) + const tx = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + }, + { common } + ).sign(pkey) + const tx2 = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + nonce: 1n, + }, + { common } + ).sign(pkey) + const block = Block.fromBlockData( + { + transactions: [tx], + header: BlockHeader.fromHeaderData( + { parentHash: chain.genesis.hash(), number: 1n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } ) - t.equal( - res.body.result.length, - 2, - 'length of response matches start of range up to highest known block' + const block2 = Block.fromBlockData( + { + transactions: [tx2], + header: BlockHeader.fromHeaderData( + { parentHash: block.hash(), number: 2n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } ) - } - await baseRequest(t, server, req, 200, expectRes, false) - const req2 = params(method, ['0x3', '0x2']) - const expectRes2 = (res: any) => { - t.equal( - res.body.result.length, - 0, - 'got empty array when start of requested range is beyond current chain head' - ) - } - await baseRequest(t, server, req2, 200, expectRes2) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy -}) + await chain.putBlocks([block, block2], true) -tape(`${method}: call with valid parameters on pre-Shanghai hardfork`, async (t) => { - // Disable stateroot validation in TxPool since valid state 
root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.setStateRoot = function (): any {} - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - const { chain, service, server, common } = await setupChain(preShanghaiGenesisJSON, 'london', { - engine: true, - hardfork: Hardfork.London, - }) - common.setHardfork(Hardfork.London) - const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) - await service.execution.vm.stateManager.putAccount(address, new Account()) - const account = await service.execution.vm.stateManager.getAccount(address) + const req = params(method, ['0x1', '0x4']) + const expectRes = (res: any) => { + assert.equal( + res.body.result[0].transactions[0], + bytesToHex(tx.serialize()), + 'got expected transaction from first payload' + ) + assert.equal( + res.body.result.length, + 2, + 'length of response matches start of range up to highest known block' + ) + } + await baseRequest(server, req, 200, expectRes, false) - account!.balance = 0xfffffffffffffffn - await service.execution.vm.stateManager.putAccount(address, account!) 
- const tx = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - }, - { common } - ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( - { - type: 0x01, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - nonce: 1n, - }, - { common } - ).sign(pkey) - const block = Block.fromBlockData( - { - transactions: [tx], - header: BlockHeader.fromHeaderData( - { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) - const block2 = Block.fromBlockData( - { - transactions: [tx2], - header: BlockHeader.fromHeaderData( - { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } - ), - }, - { common, skipConsensusFormatValidation: true } - ) + const req2 = params(method, ['0x3', '0x2']) + const expectRes2 = (res: any) => { + assert.equal( + res.body.result.length, + 0, + 'got empty array when start of requested range is beyond current chain head' + ) + } + await baseRequest(server, req2, 200, expectRes2) + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + }) - await chain.putBlocks([block, block2], true) + it('call with valid parameters on pre-Shanghai hardfork', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.setStateRoot = function (): any {} + DefaultStateManager.prototype.shallowCopy = function () { + return this + } + const { chain, service, server, common } = await setupChain(preShanghaiGenesisJSON, 'london', { + 
engine: true, + hardfork: Hardfork.London, + }) + common.setHardfork(Hardfork.London) + const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') + const address = Address.fromPrivateKey(pkey) + await service.execution.vm.stateManager.putAccount(address, new Account()) + const account = await service.execution.vm.stateManager.getAccount(address) - const req = params(method, ['0x1', '0x4']) - const expectRes = (res: any) => { - t.equal( - res.body.result[0].withdrawals, - null, - 'withdrawals field is null for pre-shanghai blocks' + account!.balance = 0xfffffffffffffffn + await service.execution.vm.stateManager.putAccount(address, account!) + const tx = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + }, + { common } + ).sign(pkey) + const tx2 = TransactionFactory.fromTxData( + { + type: 0x01, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + nonce: 1n, + }, + { common } + ).sign(pkey) + const block = Block.fromBlockData( + { + transactions: [tx], + header: BlockHeader.fromHeaderData( + { parentHash: chain.genesis.hash(), number: 1n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } + ) + const block2 = Block.fromBlockData( + { + transactions: [tx2], + header: BlockHeader.fromHeaderData( + { parentHash: block.hash(), number: 2n }, + { common, skipConsensusFormatValidation: true } + ), + }, + { common, skipConsensusFormatValidation: true } ) - } - service.execution.vm.common.setHardfork(Hardfork.London) - await baseRequest(t, server, req, 200, expectRes) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + + await chain.putBlocks([block, block2], true) + + const req = params(method, 
['0x1', '0x4']) + const expectRes = (res: any) => { + assert.equal( + res.body.result[0].withdrawals, + null, + 'withdrawals field is null for pre-shanghai blocks' + ) + } + service.execution.vm.common.setHardfork(Hardfork.London) + await baseRequest(server, req, 200, expectRes) + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + }) }) diff --git a/packages/client/test/rpc/engine/getPayloadV1.spec.ts b/packages/client/test/rpc/engine/getPayloadV1.spec.ts index b4ddef737b..54e452d155 100644 --- a/packages/client/test/rpc/engine/getPayloadV1.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadV1.spec.ts @@ -1,7 +1,7 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as genesisJSON from '../../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' @@ -9,49 +9,50 @@ import { validPayload } from './forkchoiceUpdatedV1.spec' const method = 'engine_getPayloadV1' -tape(`${method}: call with invalid payloadId`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) - - const req = params(method, [1]) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument 0: argument must be a hex string' - ) - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with unknown payloadId`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) - - const req = params(method, ['0x123']) - const expectRes = checkError(t, -32001, 'Unknown payload') - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with known payload`, async (t) => { - const { server } = await setupChain(genesisJSON, 
'post-merge', { engine: true }) - let req = params('engine_forkchoiceUpdatedV1', validPayload) - let payloadId - let expectRes = (res: any) => { - payloadId = res.body.result.payloadId - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - req = params(method, [payloadId]) - expectRes = (res: any) => { - t.equal(res.body.result.blockNumber, '0x1') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - req = params('engine_forkchoiceUpdatedV1', [ - { - ...validPayload[0], - headBlockHash: '0x3559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858', - }, - ]) - await baseRequest(t, server, req, 200, expectRes) +describe(method, () => { + it('call with invalid payloadId', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) + + const req = params(method, [1]) + const expectRes = checkError( + INVALID_PARAMS, + 'invalid argument 0: argument must be a hex string' + ) + await baseRequest(server, req, 200, expectRes) + }) + + it('call with unknown payloadId', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) + + const req = params(method, ['0x123']) + const expectRes = checkError(-32001, 'Unknown payload') + await baseRequest(server, req, 200, expectRes) + }) + + it('call with known payload', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + let req = params('engine_forkchoiceUpdatedV1', validPayload) + let payloadId + let expectRes = (res: any) => { + payloadId = res.body.result.payloadId + } + await baseRequest(server, req, 200, expectRes, false, false) + + req = params(method, [payloadId]) + expectRes = (res: any) => { + assert.equal(res.body.result.blockNumber, '0x1') + } + await baseRequest(server, req, 200, expectRes, false, false) + + expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + 
} + req = params('engine_forkchoiceUpdatedV1', [ + { + ...validPayload[0], + headBlockHash: '0x3559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858', + }, + ]) + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/engine/getPayloadV3.spec.ts b/packages/client/test/rpc/engine/getPayloadV3.spec.ts index f51b14c826..380ac0016b 100644 --- a/packages/client/test/rpc/engine/getPayloadV3.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadV3.spec.ts @@ -13,10 +13,10 @@ import { initKZG, } from '@ethereumjs/util' import * as kzg from 'c-kzg' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as genesisJSON from '../../testdata/geth-genesis/eip4844.json' +import genesisJSON from '../../testdata/geth-genesis/eip4844.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' @@ -41,98 +41,100 @@ try { } catch {} const method = 'engine_getPayloadV3' -tape(`${method}: call with invalid payloadId`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) +describe(method, () => { + it('call with invalid payloadId', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) - const req = params(method, [1]) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument 0: argument must be a hex string' - ) - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with unknown payloadId`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) + const req = params(method, [1]) + const expectRes = checkError( + INVALID_PARAMS, + 'invalid argument 0: argument must be a hex string' + ) + await baseRequest(server, req, 200, expectRes) + }) - const req = params(method, ['0x123']) - const expectRes = checkError(t, -32001, 'Unknown payload') - await baseRequest(t, server, req, 
200, expectRes) -}) + it('call with unknown payloadId', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) -tape(`${method}: call with known payload`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.setStateRoot = function (): any {} - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - const { service, server, common } = await setupChain(genesisJSON, 'post-merge', { - engine: true, - hardfork: Hardfork.Cancun, + const req = params(method, ['0x123']) + const expectRes = checkError(-32001, 'Unknown payload') + await baseRequest(server, req, 200, expectRes) }) - common.setHardfork(Hardfork.Cancun) - const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) - await service.execution.vm.stateManager.putAccount(address, new Account()) - const account = await service.execution.vm.stateManager.getAccount(address) - account!.balance = 0xfffffffffffffffn - await service.execution.vm.stateManager.putAccount(address, account!) 
- let req = params('engine_forkchoiceUpdatedV2', validPayload) - let payloadId - let expectRes = (res: any) => { - payloadId = res.body.result.payloadId - t.ok(payloadId !== undefined && payloadId !== null, 'valid payloadId should be received') - } - await baseRequest(t, server, req, 200, expectRes, false, false) + it('call with known payload', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.setStateRoot = function (): any {} + DefaultStateManager.prototype.shallowCopy = function () { + return this + } + const { service, server, common } = await setupChain(genesisJSON, 'post-merge', { + engine: true, + hardfork: Hardfork.Cancun, + }) + common.setHardfork(Hardfork.Cancun) + const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') + const address = Address.fromPrivateKey(pkey) + await service.execution.vm.stateManager.putAccount(address, new Account()) + const account = await service.execution.vm.stateManager.getAccount(address) - const txBlobs = getBlobs('hello world') - const txCommitments = blobsToCommitments(txBlobs) - const txVersionedHashes = commitmentsToVersionedHashes(txCommitments) - const txProofs = blobsToProofs(txBlobs, txCommitments) + account!.balance = 0xfffffffffffffffn + await service.execution.vm.stateManager.putAccount(address, account!) 
+ let req = params('engine_forkchoiceUpdatedV2', validPayload) + let payloadId + let expectRes = (res: any) => { + payloadId = res.body.result.payloadId + assert.ok(payloadId !== undefined && payloadId !== null, 'valid payloadId should be received') + } + await baseRequest(server, req, 200, expectRes, false, false) + ;(service.txPool as any).vm.common.setHardfork(Hardfork.Cancun) - const tx = TransactionFactory.fromTxData( - { - type: 0x03, - versionedHashes: txVersionedHashes, - blobs: txBlobs, - kzgCommitments: txCommitments, - kzgProofs: txProofs, - maxFeePerDataGas: 1n, - maxFeePerGas: 10000000000n, - maxPriorityFeePerGas: 100000000n, - gasLimit: 30000000n, - }, - { common } - ).sign(pkey) + const txBlobs = getBlobs('hello world') + const txCommitments = blobsToCommitments(txBlobs) + const txVersionedHashes = commitmentsToVersionedHashes(txCommitments) + const txProofs = blobsToProofs(txBlobs, txCommitments) - ;(service.txPool as any).vm.common.setHardfork(Hardfork.Cancun) - await service.txPool.add(tx, true) - req = params('engine_getPayloadV3', [payloadId]) - expectRes = (res: any) => { - const { executionPayload, blobsBundle } = res.body.result - t.equal( - executionPayload.blockHash, - '0x0a4f946a9dac3f6d2b86d02dfa6cf221b4fe72bbaff51b50cee4c5784156dd52', - 'built expected block' - ) - t.equal(executionPayload.excessDataGas, '0x0', 'correct execess data gas') - t.equal(executionPayload.dataGasUsed, '0x20000', 'correct data gas used') - const { commitments, proofs, blobs } = blobsBundle - t.ok( - commitments.length === proofs.length && commitments.length === blobs.length, - 'equal commitments, proofs and blobs' - ) - t.equal(blobs.length, 1, '1 blob should be returned') - t.equal(proofs[0], bytesToHex(txProofs[0]), 'proof should match') - t.equal(commitments[0], bytesToHex(txCommitments[0]), 'commitment should match') - t.equal(blobs[0], bytesToHex(txBlobs[0]), 'blob should match') - } + const tx = TransactionFactory.fromTxData( + { + type: 0x03, + 
versionedHashes: txVersionedHashes, + blobs: txBlobs, + kzgCommitments: txCommitments, + kzgProofs: txProofs, + maxFeePerDataGas: 1n, + maxFeePerGas: 10000000000n, + maxPriorityFeePerGas: 100000000n, + gasLimit: 30000000n, + }, + { common } + ).sign(pkey) - await baseRequest(t, server, req, 200, expectRes) - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + service.txPool['vm'].common.setHardfork(Hardfork.Cancun) + await service.txPool.add(tx, true) + req = params('engine_getPayloadV3', [payloadId]) + expectRes = (res: any) => { + const { executionPayload, blobsBundle } = res.body.result + assert.equal( + executionPayload.blockHash, + '0x0a4f946a9dac3f6d2b86d02dfa6cf221b4fe72bbaff51b50cee4c5784156dd52', + 'built expected block' + ) + assert.equal(executionPayload.excessDataGas, '0x0', 'correct execess data gas') + assert.equal(executionPayload.dataGasUsed, '0x20000', 'correct data gas used') + const { commitments, proofs, blobs } = blobsBundle + assert.ok( + commitments.length === proofs.length && commitments.length === blobs.length, + 'equal commitments, proofs and blobs' + ) + assert.equal(blobs.length, 1, '1 blob should be returned') + assert.equal(proofs[0], bytesToHex(txProofs[0]), 'proof should match') + assert.equal(commitments[0], bytesToHex(txCommitments[0]), 'commitment should match') + assert.equal(blobs[0], bytesToHex(txBlobs[0]), 'blob should match') + } + + await baseRequest(server, req, 200, expectRes, false, false) + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + }) }) diff --git a/packages/client/test/rpc/engine/newPayloadV1.spec.ts b/packages/client/test/rpc/engine/newPayloadV1.spec.ts index 20dad774cf..1b39b1a99d 100644 --- a/packages/client/test/rpc/engine/newPayloadV1.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV1.spec.ts @@ -1,17 +1,16 @@ import { 
BlockHeader } from '@ethereumjs/block' import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' import { Address, bytesToHex, hexToBytes, zeros } from '@ethereumjs/util' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as blocks from '../../testdata/blocks/beacon.json' -import * as genesisJSON from '../../testdata/geth-genesis/post-merge.json' +import blocks from '../../testdata/blocks/beacon.json' +import genesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' import type { HttpServer } from 'jayson' -type Test = tape.Test const method = 'engine_newPayloadV1' @@ -25,99 +24,96 @@ const originalValidate = (BlockHeader as any).prototype._consensusFormatValidati * @param server HttpServer * @param inputBlocks Array of valid ExecutionPayloadV1 data */ -export const batchBlocks = async (t: Test, server: HttpServer, inputBlocks: any[] = blocks) => { +export const batchBlocks = async (server: HttpServer, inputBlocks: any[] = blocks) => { for (let i = 0; i < inputBlocks.length; i++) { const req = params('engine_newPayloadV1', [inputBlocks[i]]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) } } -tape(`${method}: call with invalid block hash without 0x`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) +describe(method, () => { + it('call with invalid block hash without 0x', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) - const blockDataWithInvalidParentHash = [ - { - ...blockData, - parentHash: blockData.parentHash.slice(2), - }, - ] - - const req = 
params(method, blockDataWithInvalidParentHash) - const expectRes = checkError( - t, - INVALID_PARAMS, - "invalid argument 0 for key 'parentHash': hex string without 0x prefix" - ) - await baseRequest(t, server, req, 200, expectRes) -}) + const blockDataWithInvalidParentHash = [ + { + ...blockData, + parentHash: blockData.parentHash.slice(2), + }, + ] -tape(`${method}: call with invalid hex string as block hash`, async (t) => { - const { server } = baseSetup({ engine: true, includeVM: true }) - - const blockDataWithInvalidBlockHash = [{ ...blockData, blockHash: '0x-invalid-block-hash' }] - const req = params(method, blockDataWithInvalidBlockHash) - const expectRes = checkError( - t, - INVALID_PARAMS, - "invalid argument 0 for key 'blockHash': invalid block hash" - ) - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, blockDataWithInvalidParentHash) + const expectRes = checkError( + INVALID_PARAMS, + "invalid argument 0 for key 'parentHash': hex string without 0x prefix" + ) + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with non existent block hash`, async (t) => { - const { server } = await setupChain(genesisJSON, 'merge', { engine: true }) + it('call with invalid hex string as block hash', async () => { + const { server } = baseSetup({ engine: true, includeVM: true }) - const blockDataNonExistentBlockHash = [ - { - ...blockData, - blockHash: '0x2559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858', - }, - ] - const req = params(method, blockDataNonExistentBlockHash) - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID_BLOCK_HASH') - } + const blockDataWithInvalidBlockHash = [{ ...blockData, blockHash: '0x-invalid-block-hash' }] + const req = params(method, blockDataWithInvalidBlockHash) + const expectRes = checkError( + INVALID_PARAMS, + "invalid argument 0 for key 'blockHash': invalid block hash" + ) + await baseRequest(server, req, 200, expectRes) + }) - await 
baseRequest(t, server, req, 200, expectRes) -}) + it('call with non existent block hash', async () => { + const { server } = await setupChain(genesisJSON, 'merge', { engine: true }) -tape(`${method}: call with non existent parent hash`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + const blockDataNonExistentBlockHash = [ + { + ...blockData, + blockHash: '0x2559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858', + }, + ] + const req = params(method, blockDataNonExistentBlockHash) + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'INVALID_BLOCK_HASH') + } - const blockDataNonExistentParentHash = [ - { - ...blockData, - parentHash: '0x2559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858', - blockHash: '0xf31969a769bfcdbcc1c05f2542fdc7aa9336fc1ea9a82c4925320c035095d649', - }, - ] - const req = params(method, blockDataNonExistentParentHash) - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'ACCEPTED') - } + await baseRequest(server, req, 200, expectRes) + }) - await baseRequest(t, server, req, 200, expectRes) -}) + it('call with non existent parent hash', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + + const blockDataNonExistentParentHash = [ + { + ...blockData, + parentHash: '0x2559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858', + blockHash: '0xf31969a769bfcdbcc1c05f2542fdc7aa9336fc1ea9a82c4925320c035095d649', + }, + ] + const req = params(method, blockDataNonExistentParentHash) + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'ACCEPTED') + } -tape( - `${method}: call with unknown parent hash to store in remoteBlocks, then call valid ancestor in fcU`, - async (t) => { + await baseRequest(server, req, 200, expectRes) + }) + + it('call with unknown parent hash to store in remoteBlocks, then call valid ancestor in fcU', async () => { const { server } = await 
setupChain(genesisJSON, 'post-merge', { engine: true }) let req = params(method, [blocks[1]]) let expectRes = (res: any) => { - t.equal(res.body.result.status, 'ACCEPTED') + assert.equal(res.body.result.status, 'ACCEPTED') } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) req = params(method, [blocks[0]]) expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) const state = { headBlockHash: blocks[1].blockHash, @@ -126,185 +122,189 @@ tape( } req = params('engine_forkchoiceUpdatedV1', [state]) expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') + assert.equal(res.body.result.payloadStatus.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes) - } -) - -tape(`${method}: invalid terminal block`, async (t) => { - const genesisWithHigherTtd = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 17179869185, - }, - } + await baseRequest(server, req, 200, expectRes) + }) + + it('invalid terminal block', async () => { + const genesisWithHigherTtd = { + ...genesisJSON, + config: { + ...genesisJSON.config, + terminalTotalDifficulty: 17179869185, + }, + } + + BlockHeader.prototype['_consensusFormatValidation'] = td.func() + td.replace('@ethereumjs/block', { BlockHeader }) - ;(BlockHeader as any).prototype._consensusFormatValidation = td.func() - td.replace('@ethereumjs/block', { BlockHeader }) + const { server } = await setupChain(genesisWithHigherTtd, 'post-merge', { + engine: true, + }) - const { server } = await setupChain(genesisWithHigherTtd, 'post-merge', { - engine: true, + const req = params(method, [blockData, null]) + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'INVALID') + 
assert.equal(res.body.result.latestValidHash, bytesToHex(zeros(32))) + } + await baseRequest(server, req, 200, expectRes) }) - const req = params(method, [blockData, null]) - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal(res.body.result.latestValidHash, bytesToHex(zeros(32))) - } - await baseRequest(t, server, req, 200, expectRes) -}) + it('call with valid data', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) -tape(`${method}: call with valid data`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + const req = params(method, [blockData]) + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.latestValidHash, blockData.blockHash) + } + await baseRequest(server, req, 200, expectRes) + }) - const req = params(method, [blockData]) - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') - t.equal(res.body.result.latestValidHash, blockData.blockHash) - } - await baseRequest(t, server, req, 200, expectRes) -}) + it('call with valid data but invalid transactions', async () => { + const { chain, server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + chain.config.logger.silent = true + const blockDataWithInvalidTransaction = { + ...blockData, + transactions: ['0x1'], + } + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.latestValidHash, blockData.parentHash) + const expectedError = + 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' + assert.ok( + res.body.result.validationError.includes(expectedError), + `should error with - ${expectedError}` + ) + } -tape(`${method}: call with valid data but invalid transactions`, async (t) => { - const { chain, server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - 
chain.config.logger.silent = true - const blockDataWithInvalidTransaction = { - ...blockData, - transactions: ['0x1'], - } - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal(res.body.result.latestValidHash, blockData.parentHash) - const expectedError = 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' - t.ok( - res.body.result.validationError.includes(expectedError), - `should error with - ${expectedError}` - ) - } + const req = params(method, [blockDataWithInvalidTransaction]) + await baseRequest(server, req, 200, expectRes) + }) - const req = params(method, [blockDataWithInvalidTransaction]) - await baseRequest(t, server, req, 200, expectRes) -}) + it('call with valid data & valid transaction but not signed', async () => { + const { server, common, chain } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + chain.config.logger.silent = true + + // Let's mock a non-signed transaction so execution fails + const tx = FeeMarketEIP1559Transaction.fromTxData( + { + gasLimit: 21_000, + maxFeePerGas: 10, + value: 1, + to: Address.fromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), + }, + { common } + ) -tape(`${method}: call with valid data & valid transaction but not signed`, async (t) => { - const { server, common, chain } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - chain.config.logger.silent = true - - // Let's mock a non-signed transaction so execution fails - const tx = FeeMarketEIP1559Transaction.fromTxData( - { - gasLimit: 21_000, - maxFeePerGas: 10, - value: 1, - to: Address.fromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), - }, - { common } - ) - - const transactions = [bytesToHex(tx.serialize())] - const blockDataWithValidTransaction = { - ...blockData, - transactions, - blockHash: '0x308f490332a31fade8b2b46a8e1132cd15adeaffbb651cb523c067b3f007dd9e', - } - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - 
t.true(res.body.result.validationError.includes('Error verifying block while running:')) - } + const transactions = [bytesToHex(tx.serialize())] + const blockDataWithValidTransaction = { + ...blockData, + transactions, + blockHash: '0x308f490332a31fade8b2b46a8e1132cd15adeaffbb651cb523c067b3f007dd9e', + } + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'INVALID') + assert.isTrue( + res.body.result.validationError.includes('Error verifying block while running:') + ) + } - const req = params(method, [blockDataWithValidTransaction]) - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, [blockDataWithValidTransaction]) + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with valid data & valid transaction`, async (t) => { - const accountPk = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109') - const accountAddress = Address.fromPrivateKey(accountPk) - const newGenesisJSON = { - ...genesisJSON, - alloc: { - ...genesisJSON.alloc, - [accountAddress.toString()]: { - balance: '0x1000000', + it('call with valid data & valid transaction', async () => { + const accountPk = hexToBytes( + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + ) + const accountAddress = Address.fromPrivateKey(accountPk) + const newGenesisJSON = { + ...genesisJSON, + alloc: { + ...genesisJSON.alloc, + [accountAddress.toString()]: { + balance: '0x1000000', + }, }, - }, - } + } - const { server, common } = await setupChain(newGenesisJSON, 'post-merge', { engine: true }) - - const tx = FeeMarketEIP1559Transaction.fromTxData( - { - maxFeePerGas: '0x7', - value: 6, - gasLimit: 53_000, - }, - { common } - ).sign(accountPk) - const transactions = [bytesToHex(tx.serialize())] - const blockDataWithValidTransaction = { - ...blockData, - transactions, - parentHash: '0xefc1993f08864165c42195966b3f12794a1a42afa84b1047a46ab6b105828c5c', - receiptsRoot: 
'0xc508745f9f8b6847a127bbc58b7c6b2c0f073c7ca778b6f020138f0d6d782adf', - gasUsed: '0xcf08', - stateRoot: '0x5a7123ab8bdd4f172438671a2a3de143f2105aa1ac3338c97e5f433e8e380d8d', - blockHash: '0x625f2fd36bf278f92211376cbfe5acd7ac5da694e28f3d94d59488b7dbe213a4', - } - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') - } - const req = params(method, [blockDataWithValidTransaction]) - await baseRequest(t, server, req, 200, expectRes) -}) + const { server, common } = await setupChain(newGenesisJSON, 'post-merge', { engine: true }) + + const tx = FeeMarketEIP1559Transaction.fromTxData( + { + maxFeePerGas: '0x7', + value: 6, + gasLimit: 53_000, + }, + { common } + ).sign(accountPk) + const transactions = [bytesToHex(tx.serialize())] + const blockDataWithValidTransaction = { + ...blockData, + transactions, + parentHash: '0xefc1993f08864165c42195966b3f12794a1a42afa84b1047a46ab6b105828c5c', + receiptsRoot: '0xc508745f9f8b6847a127bbc58b7c6b2c0f073c7ca778b6f020138f0d6d782adf', + gasUsed: '0xcf08', + stateRoot: '0x5a7123ab8bdd4f172438671a2a3de143f2105aa1ac3338c97e5f433e8e380d8d', + blockHash: '0x625f2fd36bf278f92211376cbfe5acd7ac5da694e28f3d94d59488b7dbe213a4', + } + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'VALID') + } + const req = params(method, [blockDataWithValidTransaction]) + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: re-execute payload and verify that no errors occur`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + it('re-execute payload and verify that no errors occur', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - await batchBlocks(t, server) + await batchBlocks(server) - let req = params('engine_forkchoiceUpdatedV1', [ - { - headBlockHash: blocks[2].blockHash, - finalizedBlockHash: blocks[2].blockHash, - safeBlockHash: blocks[2].blockHash, - }, - ]) + let req = 
params('engine_forkchoiceUpdatedV1', [ + { + headBlockHash: blocks[2].blockHash, + finalizedBlockHash: blocks[2].blockHash, + safeBlockHash: blocks[2].blockHash, + }, + ]) - // Let's set new head hash - const expectResFcu = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectResFcu, false) + // Let's set new head hash + const expectResFcu = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + await baseRequest(server, req, 200, expectResFcu, false, false) - // Now let's try to re-execute payload - req = params(method, [blockData]) + // Now let's try to re-execute payload + req = params(method, [blockData]) - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes) -}) + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: parent hash equals to block hash`, async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const blockDataHasBlockHashSameAsParentHash = [ - { - ...blockData, - blockHash: blockData.parentHash, - }, - ] - const req = params(method, blockDataHasBlockHashSameAsParentHash) - const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID_BLOCK_HASH') - } + it('parent hash equals to block hash', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + const blockDataHasBlockHashSameAsParentHash = [ + { + ...blockData, + blockHash: blockData.parentHash, + }, + ] + const req = params(method, blockDataHasBlockHashSameAsParentHash) + const expectRes = (res: any) => { + assert.equal(res.body.result.status, 'INVALID_BLOCK_HASH') + } - await baseRequest(t, server, req, 200, expectRes) -}) + await baseRequest(server, req, 200, expectRes) + }) -tape(`reset TD`, (t) => { - 
;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate - td.reset() - t.end() + it('reset TD', () => { + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate + td.reset() + }) }) diff --git a/packages/client/test/rpc/engine/newPayloadV2.spec.ts b/packages/client/test/rpc/engine/newPayloadV2.spec.ts index d7ca9b5794..0c91000c10 100644 --- a/packages/client/test/rpc/engine/newPayloadV2.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV2.spec.ts @@ -1,17 +1,16 @@ import { BlockHeader } from '@ethereumjs/block' import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' import { Address, bytesToHex, hexToBytes, zeros } from '@ethereumjs/util' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as blocks from '../../testdata/blocks/beacon.json' -import * as genesisJSON from '../../testdata/geth-genesis/post-merge.json' +import blocks from '../../testdata/blocks/beacon.json' +import genesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' import type { HttpServer } from 'jayson' -type Test = tape.Test const method = 'engine_newPayloadV2' @@ -19,18 +18,18 @@ const [blockData] = blocks const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation -export const batchBlocks = async (t: Test, server: HttpServer) => { +export const batchBlocks = async (server: HttpServer) => { for (let i = 0; i < 3; i++) { const req = params(method, [blocks[i]]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) } } -tape(`${method}: call with executionPayloadV1`, (v1) => { - 
v1.test(`${method}: call with invalid block hash without 0x`, async (t) => { +describe(`${method}: call with executionPayloadV1`, () => { + it('call with invalid block hash without 0x', async () => { const { server } = baseSetup({ engine: true, includeVM: true }) const blockDataWithInvalidParentHash = [ @@ -42,27 +41,25 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { const req = params(method, blockDataWithInvalidParentHash) const expectRes = checkError( - t, INVALID_PARAMS, "invalid argument 0 for key 'parentHash': hex string without 0x prefix" ) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with invalid hex string as block hash`, async (t) => { + it('call with invalid hex string as block hash', async () => { const { server } = baseSetup({ engine: true, includeVM: true }) const blockDataWithInvalidBlockHash = [{ ...blockData, blockHash: '0x-invalid-block-hash' }] const req = params(method, blockDataWithInvalidBlockHash) const expectRes = checkError( - t, INVALID_PARAMS, "invalid argument 0 for key 'blockHash': invalid block hash" ) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with non existent block hash`, async (t) => { + it('call with non existent block hash', async () => { const { server } = await setupChain(genesisJSON, 'merge', { engine: true }) const blockDataNonExistentBlockHash = [ @@ -73,13 +70,13 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { ] const req = params(method, blockDataNonExistentBlockHash) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.status, 'INVALID') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with non existent parent hash`, async (t) => { + it('call with non existent parent hash', async () => { 
const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const blockDataNonExistentParentHash = [ @@ -91,44 +88,41 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { ] const req = params(method, blockDataNonExistentParentHash) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'ACCEPTED') + assert.equal(res.body.result.status, 'ACCEPTED') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test( - `${method}: call with unknown parent hash to store in remoteBlocks, then call valid ancestor in fcU`, - async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - - let req = params(method, [blocks[1]]) - let expectRes = (res: any) => { - t.equal(res.body.result.status, 'ACCEPTED') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - req = params(method, [blocks[0]]) - expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - const state = { - headBlockHash: blocks[1].blockHash, - safeBlockHash: blocks[1].blockHash, - finalizedBlockHash: blocks[0].blockHash, - } - req = params('engine_forkchoiceUpdatedV1', [state]) - expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - - await baseRequest(t, server, req, 200, expectRes) + it('call with unknown parent hash to store in remoteBlocks, then call valid ancestor in fcU', async () => { + const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) + + let req = params(method, [blocks[1]]) + let expectRes = (res: any) => { + assert.equal(res.body.result.status, 'ACCEPTED') + } + await baseRequest(server, req, 200, expectRes, false, false) + + req = params(method, [blocks[0]]) + expectRes = (res: any) => { + assert.equal(res.body.result.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes, false, false) + 
+ const state = { + headBlockHash: blocks[1].blockHash, + safeBlockHash: blocks[1].blockHash, + finalizedBlockHash: blocks[0].blockHash, } - ) + req = params('engine_forkchoiceUpdatedV1', [state]) + expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') + } + + await baseRequest(server, req, 200, expectRes) + }) - v1.test(`${method}: invalid terminal block`, async (t) => { + it('invalid terminal block', async () => { const genesisWithHigherTtd = { ...genesisJSON, config: { @@ -146,24 +140,24 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { const req = params(method, [blockData, null]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal(res.body.result.latestValidHash, bytesToHex(zeros(32))) + assert.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.latestValidHash, bytesToHex(zeros(32))) } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data`, async (t) => { + it('call with valid data', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const req = params(method, [blockData]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') - t.equal(res.body.result.latestValidHash, blockData.blockHash) + assert.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.latestValidHash, blockData.blockHash) } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data but invalid transactions`, async (t) => { + it('call with valid data but invalid transactions', async () => { const { chain, server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) chain.config.logger.silent = true const blockDataWithInvalidTransaction = { @@ -171,21 +165,21 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { 
transactions: ['0x1'], } const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal(res.body.result.latestValidHash, blockData.parentHash) + assert.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.latestValidHash, blockData.parentHash) const expectedError = 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' - t.ok( + assert.ok( res.body.result.validationError.includes(expectedError), `should error with - ${expectedError}` ) } const req = params(method, [blockDataWithInvalidTransaction]) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data & valid transaction but not signed`, async (t) => { + it('call with valid data & valid transaction but not signed', async () => { const { server, common, chain } = await setupChain(genesisJSON, 'post-merge', { engine: true }) chain.config.logger.silent = true @@ -207,15 +201,17 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { blockHash: '0x308f490332a31fade8b2b46a8e1132cd15adeaffbb651cb523c067b3f007dd9e', } const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.true(res.body.result.validationError.includes('Error verifying block while running:')) + assert.equal(res.body.result.status, 'INVALID') + assert.isTrue( + res.body.result.validationError.includes('Error verifying block while running:') + ) } const req = params(method, [blockDataWithValidTransaction]) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data & valid transaction`, async (t) => { + it('call with valid data & valid transaction', async () => { const accountPk = hexToBytes( '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' ) @@ -251,16 +247,16 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { blockHash: 
'0x625f2fd36bf278f92211376cbfe5acd7ac5da694e28f3d94d59488b7dbe213a4', } const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } const req = params(method, [blockDataWithValidTransaction]) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: re-execute payload and verify that no errors occur`, async (t) => { + it('re-execute payload and verify that no errors occur', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - await batchBlocks(t, server) + await batchBlocks(server) let req = params('engine_forkchoiceUpdatedV1', [ { @@ -272,20 +268,20 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { // Let's set new head hash const expectResFcu = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') + assert.equal(res.body.result.payloadStatus.status, 'VALID') } - await baseRequest(t, server, req, 200, expectResFcu, false, false) + await baseRequest(server, req, 200, expectResFcu, false, false) // Now let's try to re-execute payload req = params(method, [blockData]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: parent hash equals to block hash`, async (t) => { + it('parent hash equals to block hash', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const blockDataHasBlockHashSameAsParentHash = [ { @@ -295,22 +291,19 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { ] const req = params(method, blockDataHasBlockHashSameAsParentHash) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.status, 'INVALID') } - await baseRequest(t, server, req, 200, 
expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`reset TD`, (t) => { - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate + it(`reset TD`, () => { + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate td.reset() - t.end() }) - v1.end() -}) -tape(`${method}: call with executionPayloadV2`, (v2) => { - v2.pass('TODO: add tests for executionPayloadV2') - // TODO: add tests for executionPayloadV2 - v2.end() + it('call with executionPayloadV2', () => { + assert.ok(true, 'TODO: add tests for executionPayloadV2') + // TODO: add tests for executionPayloadV2 + }) }) diff --git a/packages/client/test/rpc/engine/newPayloadV3.spec.ts b/packages/client/test/rpc/engine/newPayloadV3.spec.ts index b0d1715133..c77e7adabe 100644 --- a/packages/client/test/rpc/engine/newPayloadV3.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV3.spec.ts @@ -1,17 +1,16 @@ import { BlockHeader } from '@ethereumjs/block' import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' import { Address, bytesToHex, hexToBytes, zeros } from '@ethereumjs/util' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as blocks from '../../testdata/blocks/beacon.json' -import * as genesisJSON from '../../testdata/geth-genesis/post-merge.json' +import blocks from '../../testdata/blocks/beacon.json' +import genesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseRequest, baseSetup, params, setupChain } from '../helpers' import { checkError } from '../util' import type { HttpServer } from 'jayson' -type Test = tape.Test const method = 'engine_newPayloadV3' @@ -19,18 +18,18 @@ const [blockData] = blocks const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation -export const batchBlocks = async (t: Test, server: HttpServer) => { +export const batchBlocks = async (server: 
HttpServer) => { for (let i = 0; i < 3; i++) { const req = params(method, [blocks[i]]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) } } -tape(`${method}: call with executionPayloadV1`, (v1) => { - v1.test(`${method}: call with invalid block hash without 0x`, async (t) => { +describe(`${method}: call with executionPayloadV1`, () => { + it('call with invalid block hash without 0x', async () => { const { server } = baseSetup({ engine: true, includeVM: true }) const blockDataWithInvalidParentHash = [ @@ -42,27 +41,25 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { const req = params(method, blockDataWithInvalidParentHash) const expectRes = checkError( - t, INVALID_PARAMS, "invalid argument 0 for key 'parentHash': hex string without 0x prefix" ) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with invalid hex string as block hash`, async (t) => { + it('call with invalid hex string as block hash', async () => { const { server } = baseSetup({ engine: true, includeVM: true }) const blockDataWithInvalidBlockHash = [{ ...blockData, blockHash: '0x-invalid-block-hash' }] const req = params(method, blockDataWithInvalidBlockHash) const expectRes = checkError( - t, INVALID_PARAMS, "invalid argument 0 for key 'blockHash': invalid block hash" ) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with non existent block hash`, async (t) => { + it('call with non existent block hash', async () => { const { server } = await setupChain(genesisJSON, 'merge', { engine: true }) const blockDataNonExistentBlockHash = [ @@ -73,13 +70,13 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { ] const req = 
params(method, blockDataNonExistentBlockHash) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.status, 'INVALID') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with non existent parent hash`, async (t) => { + it('call with non existent parent hash', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const blockDataNonExistentParentHash = [ @@ -91,44 +88,41 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { ] const req = params(method, blockDataNonExistentParentHash) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'ACCEPTED') + assert.equal(res.body.result.status, 'ACCEPTED') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test( - `${method}: call with unknown parent hash to store in remoteBlocks, then call valid ancestor in fcU`, - async (t) => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - - let req = params(method, [blocks[1]]) - let expectRes = (res: any) => { - t.equal(res.body.result.status, 'ACCEPTED') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - req = params(method, [blocks[0]]) - expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') - } - await baseRequest(t, server, req, 200, expectRes, false, false) - - const state = { - headBlockHash: blocks[1].blockHash, - safeBlockHash: blocks[1].blockHash, - finalizedBlockHash: blocks[0].blockHash, - } - req = params('engine_forkchoiceUpdatedV1', [state]) - expectRes = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') - } - - await baseRequest(t, server, req, 200, expectRes) + it('call with unknown parent hash to store in remoteBlocks, then call valid ancestor in fcU', async () => { + const { server } = await 
setupChain(genesisJSON, 'post-merge', { engine: true }) + + let req = params(method, [blocks[1]]) + let expectRes = (res: any) => { + assert.equal(res.body.result.status, 'ACCEPTED') + } + await baseRequest(server, req, 200, expectRes, false, false) + + req = params(method, [blocks[0]]) + expectRes = (res: any) => { + assert.equal(res.body.result.status, 'VALID') + } + await baseRequest(server, req, 200, expectRes, false, false) + + const state = { + headBlockHash: blocks[1].blockHash, + safeBlockHash: blocks[1].blockHash, + finalizedBlockHash: blocks[0].blockHash, + } + req = params('engine_forkchoiceUpdatedV1', [state]) + expectRes = (res: any) => { + assert.equal(res.body.result.payloadStatus.status, 'VALID') } - ) - v1.test(`${method}: invalid terminal block`, async (t) => { + await baseRequest(server, req, 200, expectRes) + }) + + it('invalid terminal block', async () => { const genesisWithHigherTtd = { ...genesisJSON, config: { @@ -146,24 +140,24 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { const req = params(method, [blockData, null]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal(res.body.result.latestValidHash, bytesToHex(zeros(32))) + assert.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.latestValidHash, bytesToHex(zeros(32))) } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data`, async (t) => { + it('call with valid data', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const req = params(method, [blockData]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') - t.equal(res.body.result.latestValidHash, blockData.blockHash) + assert.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.latestValidHash, blockData.blockHash) } - await baseRequest(t, server, req, 200, expectRes) + await 
baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data but invalid transactions`, async (t) => { + it('call with valid data but invalid transactions', async () => { const { chain, server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) chain.config.logger.silent = true const blockDataWithInvalidTransaction = { @@ -171,21 +165,21 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { transactions: ['0x1'], } const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal(res.body.result.latestValidHash, blockData.parentHash) + assert.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.latestValidHash, blockData.parentHash) const expectedError = 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' - t.ok( + assert.ok( res.body.result.validationError.includes(expectedError), `should error with - ${expectedError}` ) } const req = params(method, [blockDataWithInvalidTransaction]) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data & valid transaction but not signed`, async (t) => { + it('call with valid data & valid transaction but not signed', async () => { const { server, common, chain } = await setupChain(genesisJSON, 'post-merge', { engine: true }) chain.config.logger.silent = true @@ -207,15 +201,17 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { blockHash: '0x308f490332a31fade8b2b46a8e1132cd15adeaffbb651cb523c067b3f007dd9e', } const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.true(res.body.result.validationError.includes('Error verifying block while running:')) + assert.equal(res.body.result.status, 'INVALID') + assert.isTrue( + res.body.result.validationError.includes('Error verifying block while running:') + ) } const req = params(method, [blockDataWithValidTransaction]) - await baseRequest(t, server, 
req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: call with valid data & valid transaction`, async (t) => { + it('call with valid data & valid transaction', async () => { const accountPk = hexToBytes( '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' ) @@ -251,16 +247,16 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { blockHash: '0x625f2fd36bf278f92211376cbfe5acd7ac5da694e28f3d94d59488b7dbe213a4', } const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } const req = params(method, [blockDataWithValidTransaction]) - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: re-execute payload and verify that no errors occur`, async (t) => { + it('re-execute payload and verify that no errors occur', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - await batchBlocks(t, server) + await batchBlocks(server) let req = params('engine_forkchoiceUpdatedV1', [ { @@ -272,20 +268,20 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { // Let's set new head hash const expectResFcu = (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') + assert.equal(res.body.result.payloadStatus.status, 'VALID') } - await baseRequest(t, server, req, 200, expectResFcu, false, false) + await baseRequest(server, req, 200, expectResFcu, false, false) // Now let's try to re-execute payload req = params(method, [blockData]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`${method}: parent hash equals to block hash`, async (t) => { + it('parent hash equals to block hash', async () => { const { server } = await 
setupChain(genesisJSON, 'post-merge', { engine: true }) const blockDataHasBlockHashSameAsParentHash = [ { @@ -295,27 +291,23 @@ tape(`${method}: call with executionPayloadV1`, (v1) => { ] const req = params(method, blockDataHasBlockHashSameAsParentHash) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') + assert.equal(res.body.result.status, 'INVALID') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`reset TD`, (t) => { - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate + it(`reset TD`, () => { + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate td.reset() - t.end() }) - v1.end() -}) -tape(`${method}: call with executionPayloadV2`, (v2) => { - v2.pass('TODO: add tests for executionPayloadV2') - v2.end() - // TODO: add tests for executionPayloadV2 -}) -tape(`${method}: call with executionPayloadV3`, (v2) => { - v2.pass('TODO: add tests for executionPayloadV2') - v2.end() - // TODO: add tests for executionPayloadV3 + it('call with executionPayloadV2', () => { + assert.ok(true, 'TODO: add tests for executionPayloadV2') + // TODO: add tests for executionPayloadV2 + }) + it('call with executionPayloadV3', () => { + assert.ok(true, 'TODO: add tests for executionPayloadV2') + // TODO: add tests for executionPayloadV3 + }) }) diff --git a/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts b/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts index 26c3501e06..80d78b80d3 100644 --- a/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts @@ -1,15 +1,14 @@ import { BlockHeader } from '@ethereumjs/block' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as blocks from 
'../../testdata/blocks/beacon.json' -import * as genesisJSON from '../../testdata/geth-genesis/eip4844.json' +import blocks from '../../testdata/blocks/beacon.json' +import genesisJSON from '../../testdata/geth-genesis/eip4844.json' import { baseRequest, params, setupChain } from '../helpers' import { checkError } from '../util' import type { HttpServer } from 'jayson' -type Test = tape.Test const method = 'engine_newPayloadV3' @@ -19,18 +18,18 @@ const [blockData] = blocks const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation -export const batchBlocks = async (t: Test, server: HttpServer) => { +export const batchBlocks = async (server: HttpServer) => { for (let i = 0; i < 3; i++) { const req = params(method, [blocks[i], []]) const expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes, false) + await baseRequest(server, req, 200, expectRes, false) } } -tape(`${method}: Cancun validations`, (v1) => { - v1.test(`${method}: versionedHashes`, async (t) => { +describe(`${method}: Cancun validations`, () => { + it('versionedHashes', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const blockDataExtraVersionedHashes = [ @@ -46,14 +45,14 @@ tape(`${method}: Cancun validations`, (v1) => { ] let req = params(method, blockDataExtraVersionedHashes) let expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal( + assert.equal(res.body.result.status, 'INVALID') + assert.equal( res.body.result.validationError, 'Error verifying versionedHashes: expected=0 received=2' ) } - await baseRequest(t, server, req, 200, expectRes, false) + await baseRequest(server, req, 200, expectRes, false) const txString = 
'0x03f87c01808405f5e1008502540be4008401c9c380808080c001e1a001317228841f747eac2b4987a0225753a4f81688b31b21192ad2d2a3f5d252c580a01146addbda4889ddeaa8e4d74baae37c55f9796ab17030c762260faa797ca33ea0555a673397ea115d81c390a560ab77d3f63e93a59270b1b8d12cd2a1fb8b9b11' @@ -71,8 +70,8 @@ tape(`${method}: Cancun validations`, (v1) => { }, ] req = params(method, blockDataNoneHashes) - expectRes = checkError(t, INVALID_PARAMS, 'Missing versionedHashes after Cancun is activated') - await baseRequest(t, server, req, 200, expectRes, false) + expectRes = checkError(INVALID_PARAMS, 'Missing versionedHashes after Cancun is activated') + await baseRequest(server, req, 200, expectRes, false) const blockDataExtraMissingHashes1 = [ { @@ -89,13 +88,13 @@ tape(`${method}: Cancun validations`, (v1) => { ] req = params(method, blockDataExtraMissingHashes1) expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal( + assert.equal(res.body.result.status, 'INVALID') + assert.equal( res.body.result.validationError, 'Error verifying versionedHashes: expected=2 received=1' ) } - await baseRequest(t, server, req, 200, expectRes, false) + await baseRequest(server, req, 200, expectRes, false) const blockDataExtraMisMatchingHashes1 = [ { @@ -112,13 +111,13 @@ tape(`${method}: Cancun validations`, (v1) => { ] req = params(method, blockDataExtraMisMatchingHashes1) expectRes = (res: any) => { - t.equal(res.body.result.status, 'INVALID') - t.equal( + assert.equal(res.body.result.status, 'INVALID') + assert.equal( res.body.result.validationError, 'Error verifying versionedHashes: mismatch at index=1 expected=0x0131…52c5 received=0x3456…' ) } - await baseRequest(t, server, req, 200, expectRes, false) + await baseRequest(server, req, 200, expectRes, false) const blockDataMatchingVersionedHashes = [ { @@ -135,15 +134,13 @@ tape(`${method}: Cancun validations`, (v1) => { ] req = params(method, blockDataMatchingVersionedHashes) expectRes = (res: any) => { - t.equal(res.body.result.status, 
'ACCEPTED') + assert.equal(res.body.result.status, 'ACCEPTED') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) - v1.test(`reset TD`, (t) => { - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate + it(`reset TD`, () => { + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate td.reset() - t.end() }) - v1.end() }) diff --git a/packages/client/test/rpc/engine/withdrawals.spec.ts b/packages/client/test/rpc/engine/withdrawals.spec.ts index 386017ad18..6afd8eb9e1 100644 --- a/packages/client/test/rpc/engine/withdrawals.spec.ts +++ b/packages/client/test/rpc/engine/withdrawals.spec.ts @@ -1,9 +1,9 @@ import { Block } from '@ethereumjs/block' import { Withdrawal, bigIntToHex, bytesToHex, intToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as genesisJSON from '../../testdata/geth-genesis/withdrawals.json' +import genesisJSON from '../../testdata/geth-genesis/withdrawals.json' import { baseRequest, params, setupChain } from '../helpers' import { checkError } from '../util' @@ -101,21 +101,24 @@ const testCases = [ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { const validPayloadAttributesWithWithdrawals = { ...validPayloadAttributes, withdrawals } - tape(name, async (t) => { + it(name, async () => { // check withdrawals root computation const computedWithdrawalsRoot = bytesToHex( await Block.genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData)) ) - t.equal(withdrawalsRoot, computedWithdrawalsRoot, 'withdrawalsRoot compuation should match') + assert.equal( + withdrawalsRoot, + computedWithdrawalsRoot, + 'withdrawalsRoot compuation should match' + ) const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) let req = params('engine_forkchoiceUpdatedV2', [validForkChoiceState, 
validPayloadAttributes]) let expectRes = checkError( - t, INVALID_PARAMS, 'PayloadAttributesV2 MUST be used after Shanghai is activated' ) - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) req = params('engine_forkchoiceUpdatedV2', [ validForkChoiceState, @@ -123,25 +126,29 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { ]) let payloadId expectRes = (res: any) => { - t.equal(res.body.result.payloadId !== undefined, true) + assert.equal(res.body.result.payloadId !== undefined, true) payloadId = res.body.result.payloadId } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) let payload: ExecutionPayload | undefined = undefined req = params('engine_getPayloadV2', [payloadId]) expectRes = (res: any) => { const { executionPayload, blockValue } = res.body.result - t.equal(executionPayload!.blockNumber, '0x1') - t.equal(executionPayload!.withdrawals!.length, withdrawals.length, 'withdrawals should match') - t.equal(blockValue, '0x0', 'No value should be returned') + assert.equal(executionPayload!.blockNumber, '0x1') + assert.equal( + executionPayload!.withdrawals!.length, + withdrawals.length, + 'withdrawals should match' + ) + assert.equal(blockValue, '0x0', 'No value should be returned') payload = executionPayload } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) if (gethBlockRlp !== undefined) { // check if stateroot matches - t.equal( + assert.equal( payload!.stateRoot, '0x23eadd91fca55c0e14034e4d63b2b3ed43f2e807b6bf4d276b784ac245e7fa3f', 'stateRoot should match' @@ -150,9 +157,9 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { req = params('engine_newPayloadV2', [payload]) expectRes = (res: any) => { - t.equal(res.body.result.status, 'VALID') + 
assert.equal(res.body.result.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes, false, false) + await baseRequest(server, req, 200, expectRes, false, false) req = params('engine_forkchoiceUpdatedV2', [ { @@ -161,8 +168,8 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { }, ]) expectRes = async (res: any) => { - t.equal(res.body.result.payloadStatus.status, 'VALID') + assert.equal(res.body.result.payloadStatus.status, 'VALID') } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) }) } diff --git a/packages/client/test/rpc/eth/blockNumber.spec.ts b/packages/client/test/rpc/eth/blockNumber.spec.ts index 74b7e9292f..4b7676c9c9 100644 --- a/packages/client/test/rpc/eth/blockNumber.spec.ts +++ b/packages/client/test/rpc/eth/blockNumber.spec.ts @@ -1,26 +1,28 @@ import { bigIntToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' const method = 'eth_blockNumber' -tape(`${method}: call with valid arguments`, async (t) => { - const mockBlockNumber = BigInt(123) - const mockChain = { - headers: { latest: { number: mockBlockNumber } }, - async getCanonicalHeadHeader(): Promise { - return { - number: mockBlockNumber, - } - }, - } - const manager = createManager(createClient({ chain: mockChain })) - const server = startRPC(manager.getMethods()) +describe(method, () => { + it('call with valid arguments', async () => { + const mockBlockNumber = BigInt(123) + const mockChain = { + headers: { latest: { number: mockBlockNumber } }, + async getCanonicalHeadHeader(): Promise { + return { + number: mockBlockNumber, + } + }, + } + const manager = createManager(createClient({ chain: mockChain })) + const server = startRPC(manager.getMethods()) - const req = params(method) - const expectRes = (res: any) => { - t.equal(res.body.result, 
bigIntToHex(mockBlockNumber)) - } - await baseRequest(t, server, req, 200, expectRes) + const req = params(method) + const expectRes = (res: any) => { + assert.equal(res.body.result, bigIntToHex(mockBlockNumber)) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/call.spec.ts b/packages/client/test/rpc/eth/call.spec.ts index 5595a2739a..7681059be1 100644 --- a/packages/client/test/rpc/eth/call.spec.ts +++ b/packages/client/test/rpc/eth/call.spec.ts @@ -3,7 +3,7 @@ import { Blockchain } from '@ethereumjs/blockchain' import { Chain, Common, Hardfork } from '@ethereumjs/common' import { LegacyTransaction } from '@ethereumjs/tx' import { Address, bigIntToHex, bytesToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -13,27 +13,28 @@ import type { FullEthereumService } from '../../../src/service' const method = 'eth_call' -tape(`${method}: call with valid arguments`, async (t) => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) +describe(method, () => { + it('call with valid arguments', async () => { + const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) - const { execution } = 
client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - // contract: - /* + // contract: + /* // SPDX-License-Identifier: MIT pragma solidity ^0.7.4; @@ -43,124 +44,124 @@ tape(`${method}: call with valid arguments`, async (t) => { } } */ - const data = - '0x6080604052348015600f57600080fd5b50609d8061001e6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c806326b85ee114602d575b600080fd5b6033605f565b604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60003390509056fea2646970667358221220455a67424337c6c5783846576348cb04caa9cf6f3e7def201c1f3fbc54aa373a64736f6c63430007060033' - - // construct block with tx - const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) - tx.getSenderAddress = () => { - return address - } - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit, + const data = + '0x6080604052348015600f57600080fd5b50609d8061001e6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c806326b85ee114602d575b600080fd5b6033605f565b604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60003390509056fea2646970667358221220455a67424337c6c5783846576348cb04caa9cf6f3e7def201c1f3fbc54aa373a64736f6c63430007060033' + + // construct block with tx + const gasLimit = 2000000 + const tx 
= LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( + { + header: { + parentHash: parent.hash(), + number: 1, + gasLimit, + }, }, - }, - { common, calcDifficultyFromHeader: parent } - ) - block.transactions[0] = tx - - // deploy contract - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - const { createdAddress } = result.results[0] - await vm.blockchain.putBlock(ranBlock!) - - // get return value - const funcHash = '26b85ee1' // myAddress() - const estimateTxData = { - to: createdAddress!.toString(), - from: address.toString(), - data: `0x${funcHash}`, - gasLimit: bigIntToHex(BigInt(53000)), - } - const estimateTx = LegacyTransaction.fromTxData(estimateTxData, { freeze: false }) - estimateTx.getSenderAddress = () => { - return address - } - const { execResult } = await ( - await vm.shallowCopy() - ).runTx({ - tx: estimateTx, - skipNonce: true, - skipBalance: true, - skipBlockGasLimitValidation: true, - skipHardForkValidation: true, + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = tx + + // deploy contract + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const { createdAddress } = result.results[0] + await vm.blockchain.putBlock(ranBlock!) 
+ + // get return value + const funcHash = '26b85ee1' // myAddress() + const estimateTxData = { + to: createdAddress!.toString(), + from: address.toString(), + data: `0x${funcHash}`, + gasLimit: bigIntToHex(BigInt(53000)), + } + const estimateTx = LegacyTransaction.fromTxData(estimateTxData, { freeze: false }) + estimateTx.getSenderAddress = () => { + return address + } + const { execResult } = await ( + await vm.shallowCopy() + ).runTx({ + tx: estimateTx, + skipNonce: true, + skipBalance: true, + skipBlockGasLimitValidation: true, + skipHardForkValidation: true, + }) + + // verify return value is accurate + let req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'latest']) + let expectRes = (res: any) => { + const msg = 'should return the correct return value' + assert.equal(res.body.result, bytesToHex(execResult.returnValue), msg) + } + await baseRequest(server, req, 200, expectRes, false) + + req = params(method, [{ ...estimateTxData }, 'latest']) + expectRes = (res: any) => { + const msg = 'should return the correct return value with no gas limit provided' + assert.equal(res.body.result, bytesToHex(execResult.returnValue), msg) + } + await baseRequest(server, req, 200, expectRes, false) + + req = params(method, [{ gasLimit, data }, 'latest']) + expectRes = (res: any) => { + const msg = `should let run call without 'to' for contract creation` + assert.equal(res.body.result, bytesToHex(result.results[0].execResult.returnValue), msg) + } + await baseRequest(server, req, 200, expectRes, true) }) - // verify return value is accurate - let req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'latest']) - let expectRes = (res: any) => { - const msg = 'should return the correct return value' - t.equal(res.body.result, bytesToHex(execResult.returnValue), msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - req = params(method, [{ ...estimateTxData }, 'latest']) - expectRes = (res: any) => { - const msg = 
'should return the correct return value with no gas limit provided' - t.equal(res.body.result, bytesToHex(execResult.returnValue), msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - req = params(method, [{ gasLimit, data }, 'latest']) - expectRes = (res: any) => { - const msg = `should let run call without 'to' for contract creation` - t.equal(res.body.result, bytesToHex(result.results[0].execResult.returnValue), msg) - } - await baseRequest(t, server, req, 200, expectRes, true) -}) + it('call with unsupported block argument', async () => { + const blockchain = await Blockchain.create() -tape(`${method}: call with unsupported block argument`, async (t) => { - const blockchain = await Blockchain.create() + const client = createClient({ blockchain, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) - const client = createClient({ blockchain, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const funcHash = '26b85ee1' // borrowed from valid test above + const estimateTxData = { + to: address.toString(), + from: address.toString(), + data: `0x${funcHash}`, + gasLimit: bigIntToHex(BigInt(53000)), + } - const funcHash = '26b85ee1' // borrowed from valid test above - const estimateTxData = { - to: address.toString(), - from: address.toString(), - data: `0x${funcHash}`, - gasLimit: bigIntToHex(BigInt(53000)), - } + const req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'pending']) + const expectRes = checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) - const req = params(method, [{ ...estimateTxData, gas: 
estimateTxData.gasLimit }, 'pending']) - const expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) -}) + it('call with invalid hex params', async () => { + const blockchain = await Blockchain.create() -tape(`${method}: call with invalid hex params`, async (t) => { - const blockchain = await Blockchain.create() - - const client = createClient({ blockchain, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - const estimateTxData = { - to: address.toString(), - from: address.toString(), - data: ``, - gasLimit: bigIntToHex(BigInt(53000)), - } - - const req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'latest']) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument data: hex string without 0x prefix' - ) - await baseRequest(t, server, req, 200, expectRes) + const client = createClient({ blockchain, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const estimateTxData = { + to: address.toString(), + from: address.toString(), + data: ``, + gasLimit: bigIntToHex(BigInt(53000)), + } + + const req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'latest']) + const expectRes = checkError( + INVALID_PARAMS, + 'invalid argument data: hex string without 0x prefix' + ) + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/chainId.spec.ts b/packages/client/test/rpc/eth/chainId.spec.ts index 7f0ff514df..4acac4b9db 100644 --- a/packages/client/test/rpc/eth/chainId.spec.ts +++ b/packages/client/test/rpc/eth/chainId.spec.ts @@ -1,37 +1,60 @@ import { 
BlockHeader } from '@ethereumjs/block' -import * as tape from 'tape' +import { Chain, Common } from '@ethereumjs/common' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' -import { baseRequest, baseSetup, params } from '../helpers' +import { + baseRequest, + baseSetup, + createClient, + createManager, + params, + startRPC, +} from '../helpers.js' const method = 'eth_chainId' const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation -tape(`${method}: calls`, async (t) => { - const { server } = baseSetup() - - const req = params(method, []) - const expectRes = (res: any) => { - const msg = 'chainId should be a string' - t.equal(typeof res.body.result, 'string', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: returns 1 for Mainnet`, async (t) => { - const { server } = baseSetup() - - const req = params(method, []) - const expectRes = (res: any) => { - const msg = 'should return chainId 1' - t.equal(res.body.result, '0x1', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`reset TD`, (t) => { - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate - td.reset() - t.end() +describe(method, () => { + it('calls', async () => { + const { server } = baseSetup() + + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'chainId should be a string' + assert.equal(typeof res.body.result, 'string', msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('returns 1 for Mainnet', async () => { + const { server } = baseSetup() + + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return chainId 1' + assert.equal(res.body.result, '0x1', msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('returns 3 for Goerli', async () => { + const manager = createManager( + createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) + ) + const 
server = startRPC(manager.getMethods()) + + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return chainId 5' + assert.equal(res.body.result, '0x5', msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('reset TD', () => { + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate + td.reset() + }) }) diff --git a/packages/client/test/rpc/eth/estimateGas.spec.ts b/packages/client/test/rpc/eth/estimateGas.spec.ts index 2c13480877..c3ad491a6e 100644 --- a/packages/client/test/rpc/eth/estimateGas.spec.ts +++ b/packages/client/test/rpc/eth/estimateGas.spec.ts @@ -1,9 +1,10 @@ import { Block, BlockHeader } from '@ethereumjs/block' import { Blockchain } from '@ethereumjs/blockchain' import { Common } from '@ethereumjs/common' +import { getGenesis } from '@ethereumjs/genesis' import { LegacyTransaction } from '@ethereumjs/tx' import { Address, bigIntToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -13,30 +14,33 @@ import type { FullEthereumService } from '../../../src/service' const method = 'eth_estimateGas' -tape(`${method}: call with valid arguments`, async (t) => { - // Use custom genesis so we can test EIP1559 txs more easily - const genesisJson = await import('../../testdata/geth-genesis/rpctestnet.json') - const common = Common.fromGethGenesis(genesisJson, { chain: 'testnet', hardfork: 'berlin' }) - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) - - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - 
t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - await vm.stateManager.generateCanonicalGenesis({}) - - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - // contract: - /* +describe( + method, + () => { + it('call with valid arguments', async () => { + // Use custom genesis so we can test EIP1559 txs more easily + const genesisJson = await import('../../testdata/geth-genesis/rpctestnet.json') + const common = Common.fromGethGenesis(genesisJson, { chain: 'testnet', hardfork: 'berlin' }) + const blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) + + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + // contract: + /* // SPDX-License-Identifier: MIT pragma solidity ^0.7.4; @@ -46,139 +50,144 @@ tape(`${method}: call with valid arguments`, async (t) => { } } */ - const data = - '0x6080604052348015600f57600080fd5b50609d8061001e6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c806326b85ee114602d575b600080fd5b6033605f565b604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60003390509056fea2646970667358221220455a67424337c6c5783846576348cb04caa9cf6f3e7def201c1f3fbc54aa373a64736f6c63430007060033' - - // construct block with tx - const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) - 
tx.getSenderAddress = () => { - return address - } - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit, - }, - }, - { common, calcDifficultyFromHeader: parent } - ) - block.transactions[0] = tx - - // deploy contract - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - const { createdAddress } = result.results[0] - await vm.blockchain.putBlock(ranBlock!) - - // get gas estimate - const funcHash = '26b85ee1' // myAddress() - const estimateTxData = { - to: createdAddress!.toString(), - from: address.toString(), - data: `0x${funcHash}`, - gasLimit: bigIntToHex(BigInt(53000)), - } - const estimateTx = LegacyTransaction.fromTxData(estimateTxData, { freeze: false }) - estimateTx.getSenderAddress = () => { - return address - } - const { totalGasSpent } = await ( - await vm.shallowCopy() - ).runTx({ - tx: estimateTx, - skipNonce: true, - skipBalance: true, - skipBlockGasLimitValidation: true, - skipHardForkValidation: true, - }) - - // verify estimated gas is accurate - const req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'latest']) - const expectRes = (res: any) => { - const msg = 'should return the correct gas estimate' - t.equal(res.body.result, '0x' + totalGasSpent.toString(16), msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - // Test without blockopt as its optional and should default to latest - const reqWithoutBlockOpt = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }]) - await baseRequest(t, server, reqWithoutBlockOpt, 200, expectRes, false) - - // Setup chain to run an EIP1559 tx - const service = client.services[0] as FullEthereumService - service.execution.vm.common.setHardfork('london') - 
service.chain.config.chainCommon.setHardfork('london') - const headBlock = await service.chain.getCanonicalHeadBlock() - const londonBlock = Block.fromBlockData( - { - header: BlockHeader.fromHeaderData( + const data = + '0x6080604052348015600f57600080fd5b50609d8061001e6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c806326b85ee114602d575b600080fd5b6033605f565b604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60003390509056fea2646970667358221220455a67424337c6c5783846576348cb04caa9cf6f3e7def201c1f3fbc54aa373a64736f6c63430007060033' + + // construct block with tx + const gasLimit = 2000000 + const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( { - baseFeePerGas: 1000000000n, - number: 2n, - parentHash: headBlock.header.hash(), + header: { + parentHash: parent.hash(), + number: 1, + gasLimit, + }, }, + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = tx + + // deploy contract + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const { createdAddress } = result.results[0] + await vm.blockchain.putBlock(ranBlock!) 
+ + // get gas estimate + const funcHash = '26b85ee1' // myAddress() + const estimateTxData = { + to: createdAddress!.toString(), + from: address.toString(), + data: `0x${funcHash}`, + gasLimit: bigIntToHex(BigInt(53000)), + } + const estimateTx = LegacyTransaction.fromTxData(estimateTxData, { freeze: false }) + estimateTx.getSenderAddress = () => { + return address + } + const { totalGasSpent } = await ( + await vm.shallowCopy() + ).runTx({ + tx: estimateTx, + skipNonce: true, + skipBalance: true, + skipBlockGasLimitValidation: true, + skipHardForkValidation: true, + }) + + // verify estimated gas is accurate + const req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'latest']) + const expectRes = (res: any) => { + const msg = 'should return the correct gas estimate' + assert.equal(res.body.result, '0x' + totalGasSpent.toString(16), msg) + } + await baseRequest(server, req, 200, expectRes, false) + + // Test without blockopt as its optional and should default to latest + const reqWithoutBlockOpt = params(method, [ + { ...estimateTxData, gas: estimateTxData.gasLimit }, + ]) + await baseRequest(server, reqWithoutBlockOpt, 200, expectRes, false) + + // Setup chain to run an EIP1559 tx + const service = client.services[0] as FullEthereumService + service.execution.vm.common.setHardfork('london') + service.chain.config.chainCommon.setHardfork('london') + const headBlock = await service.chain.getCanonicalHeadBlock() + const londonBlock = Block.fromBlockData( { - common: service.chain.config.chainCommon, - skipConsensusFormatValidation: true, - calcDifficultyFromHeader: headBlock.header, - } - ), - }, - { common: service.chain.config.chainCommon } - ) - - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block: londonBlock, generate: true, skipBlockValidation: true }) - await vm.blockchain.putBlock(ranBlock!) 
- - // Test EIP1559 tx - const EIP1559req = params(method, [ - { ...estimateTxData, type: 2, maxFeePerGas: '0x' + 10000000000n.toString(16) }, - ]) - const expect1559Res = (res: any) => { - const msg = 'should return the correct gas estimate for EIP1559 tx' - t.equal(res.body.result, '0x' + totalGasSpent.toString(16), msg) - } - - await baseRequest(t, server, EIP1559req, 200, expect1559Res, false) - - // Test EIP1559 tx with no maxFeePerGas - const EIP1559reqNoGas = params(method, [ - { ...estimateTxData, type: 2, maxFeePerGas: undefined, gasLimit: undefined }, - ]) - await baseRequest(t, server, EIP1559reqNoGas, 200, expect1559Res, false) - - // Test legacy tx with London head block - const legacyTxNoGas = params(method, [ - { ...estimateTxData, maxFeePerGas: undefined, gasLimit: undefined }, - ]) - await baseRequest(t, server, legacyTxNoGas, 200, expect1559Res) -}) - -tape(`${method}: call with unsupported block argument`, async (t) => { - const blockchain = await Blockchain.create() - - const client = createClient({ blockchain, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - const funcHash = '26b85ee1' // borrowed from valid test above - const estimateTxData = { - to: address.toString(), - from: address.toString(), - data: `0x${funcHash}`, - gasLimit: bigIntToHex(BigInt(53000)), - } - - const req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'pending']) - const expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) -}) + header: BlockHeader.fromHeaderData( + { + baseFeePerGas: 1000000000n, + number: 2n, + parentHash: headBlock.header.hash(), + }, + { + common: service.chain.config.chainCommon, + skipConsensusFormatValidation: true, + calcDifficultyFromHeader: headBlock.header, + } + ), + 
}, + { common: service.chain.config.chainCommon } + ) + + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + await vm.runBlock({ block: londonBlock, generate: true, skipBlockValidation: true }) + await vm.blockchain.putBlock(ranBlock!) + + // Test EIP1559 tx + const EIP1559req = params(method, [ + { ...estimateTxData, type: 2, maxFeePerGas: '0x' + 10000000000n.toString(16) }, + ]) + const expect1559Res = (res: any) => { + const msg = 'should return the correct gas estimate for EIP1559 tx' + assert.equal(res.body.result, '0x' + totalGasSpent.toString(16), msg) + } + + await baseRequest(server, EIP1559req, 200, expect1559Res, false) + + // Test EIP1559 tx with no maxFeePerGas + const EIP1559reqNoGas = params(method, [ + { ...estimateTxData, type: 2, maxFeePerGas: undefined, gasLimit: undefined }, + ]) + await baseRequest(server, EIP1559reqNoGas, 200, expect1559Res, false) + + // Test legacy tx with London head block + const legacyTxNoGas = params(method, [ + { ...estimateTxData, maxFeePerGas: undefined, gasLimit: undefined }, + ]) + await baseRequest(server, legacyTxNoGas, 200, expect1559Res) + }) + + it('call with unsupported block argument', async () => { + const blockchain = await Blockchain.create() + + const client = createClient({ blockchain, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + const funcHash = '26b85ee1' // borrowed from valid test above + const estimateTxData = { + to: address.toString(), + from: address.toString(), + data: `0x${funcHash}`, + gasLimit: bigIntToHex(BigInt(53000)), + } + + const req = params(method, [{ ...estimateTxData, gas: estimateTxData.gasLimit }, 'pending']) + const expectRes = checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) + }, + 20000 +) diff --git 
a/packages/client/test/rpc/eth/gasPrice.spec.ts b/packages/client/test/rpc/eth/gasPrice.spec.ts index f1037ab95c..05743d0fb1 100644 --- a/packages/client/test/rpc/eth/gasPrice.spec.ts +++ b/packages/client/test/rpc/eth/gasPrice.spec.ts @@ -1,8 +1,8 @@ import { FeeMarketEIP1559Transaction, LegacyTransaction } from '@ethereumjs/tx' import { bigIntToHex, intToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' -import * as pow from '../../testdata/geth-genesis/pow.json' +import pow from '../../testdata/geth-genesis/pow.json' import { baseRequest, dummy, @@ -14,187 +14,189 @@ import { const method = 'eth_gasPrice' -tape(`${method}: call with legacy transaction data`, async (t) => { - const { chain, common, execution, server } = await setupChain(pow, 'pow') +describe(method, () => { + it('call with legacy transaction data', async () => { + const { chain, common, execution, server } = await setupChain(pow, 'pow') - const GAS_PRICE = 100 - // construct tx - const tx = LegacyTransaction.fromTxData( - { gasLimit: 21000, gasPrice: GAS_PRICE, to: '0x0000000000000000000000000000000000000000' }, - { common } - ).sign(dummy.privKey) + const GAS_PRICE = 100 + // construct tx + const tx = LegacyTransaction.fromTxData( + { gasLimit: 21000, gasPrice: GAS_PRICE, to: '0x0000000000000000000000000000000000000000' }, + { common } + ).sign(dummy.privKey) - await runBlockWithTxs(chain, execution, [tx]) + await runBlockWithTxs(chain, execution, [tx]) - const req = params(method, []) - const expectRes = (res: any) => { - const msg = 'should return the correct suggested gas price with 1 legacy transaction' - t.equal(res.body.result, intToHex(GAS_PRICE), msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return the correct suggested gas price with 1 legacy transaction' + assert.equal(res.body.result, intToHex(GAS_PRICE), msg) + } + 
await baseRequest(server, req, 200, expectRes) + }) + + it('call with multiple legacy transactions', async () => { + const { chain, common, execution, server } = await setupChain(pow, 'pow') + const iterations = BigInt(20) + let averageGasPrice = BigInt(0) + for (let i = 0; i < iterations; i++) { + const gasPrice = i * 100 + averageGasPrice += BigInt(gasPrice) + const tx = LegacyTransaction.fromTxData( + { nonce: i, gasLimit: 21000, gasPrice, to: '0x0000000000000000000000000000000000000000' }, + { common } + ).sign(dummy.privKey) + await runBlockWithTxs(chain, execution, [tx]) + } -tape(`${method}: call with multiple legacy transactions`, async (t) => { - const { chain, common, execution, server } = await setupChain(pow, 'pow') - const iterations = BigInt(20) - let averageGasPrice = BigInt(0) - for (let i = 0; i < iterations; i++) { - const gasPrice = i * 100 - averageGasPrice += BigInt(gasPrice) - const tx = LegacyTransaction.fromTxData( - { nonce: i, gasLimit: 21000, gasPrice, to: '0x0000000000000000000000000000000000000000' }, + averageGasPrice = averageGasPrice / iterations + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return the correct gas price with multiple legacy transactions' + assert.equal(res.body.result, bigIntToHex(averageGasPrice), msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with multiple legacy transactions in a single block', async () => { + const { chain, common, execution, server } = await setupChain(pow, 'pow') + + const G1 = 100 + const G2 = 1231231 + + const tx1 = LegacyTransaction.fromTxData( + { gasLimit: 21000, gasPrice: G1, to: '0x0000000000000000000000000000000000000000' }, + { common } + ).sign(dummy.privKey) + const tx2 = LegacyTransaction.fromTxData( + { nonce: 1, gasLimit: 21000, gasPrice: G2, to: '0x0000000000000000000000000000000000000000' }, { common } ).sign(dummy.privKey) - await runBlockWithTxs(chain, execution, [tx]) - } - - averageGasPrice = 
averageGasPrice / iterations - const req = params(method, []) - const expectRes = (res: any) => { - const msg = 'should return the correct gas price with multiple legacy transactions' - t.equal(res.body.result, bigIntToHex(averageGasPrice), msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) -tape(`${method}: call with multiple legacy transactions in a single block`, async (t) => { - const { chain, common, execution, server } = await setupChain(pow, 'pow') - - const G1 = 100 - const G2 = 1231231 - - const tx1 = LegacyTransaction.fromTxData( - { gasLimit: 21000, gasPrice: G1, to: '0x0000000000000000000000000000000000000000' }, - { common } - ).sign(dummy.privKey) - const tx2 = LegacyTransaction.fromTxData( - { nonce: 1, gasLimit: 21000, gasPrice: G2, to: '0x0000000000000000000000000000000000000000' }, - { common } - ).sign(dummy.privKey) - - await runBlockWithTxs(chain, execution, [tx1, tx2]) - - const averageGasPrice = (G1 + G2) / 2 - const req = params(method, []) - const expectRes = (res: any) => { - const msg = 'should return the correct gas price with multiple legacy transactions in a block' - t.equal(res.body.result, intToHex(Math.trunc(averageGasPrice)), msg) - } - await baseRequest(t, server, req, 200, () => expectRes) -}) + await runBlockWithTxs(chain, execution, [tx1, tx2]) -tape(`${method}: call with 1559 transaction data`, async (t) => { - const { chain, common, execution, server } = await setupChain( - gethGenesisStartLondon(pow), - 'powLondon' - ) - - const tx = FeeMarketEIP1559Transaction.fromTxData( - { - gasLimit: 21000, - maxPriorityFeePerGas: 10, - maxFeePerGas: 975000000, - to: '0x0000000000000000000000000000000000000000', - }, - { common } - ).sign(dummy.privKey) - - await runBlockWithTxs(chain, execution, [tx]) - const req = params(method, []) - const latest = await chain.getCanonicalHeadHeader() - const baseFee = latest.calcNextBaseFee() - const gasPrice = BigInt(baseFee + tx.maxPriorityFeePerGas) - - const expectRes = (res: any) 
=> { - const msg = 'should return the correct gas price with 1 1559 transaction' - t.equal(res.body.result, bigIntToHex(gasPrice), msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const averageGasPrice = (G1 + G2) / 2 + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return the correct gas price with multiple legacy transactions in a block' + assert.equal(res.body.result, intToHex(Math.trunc(averageGasPrice)), msg) + } + await baseRequest(server, req, 200, () => expectRes) + }) + + it('call with 1559 transaction data', async () => { + const { chain, common, execution, server } = await setupChain( + gethGenesisStartLondon(pow), + 'powLondon' + ) + + const tx = FeeMarketEIP1559Transaction.fromTxData( + { + gasLimit: 21000, + maxPriorityFeePerGas: 10, + maxFeePerGas: 975000000, + to: '0x0000000000000000000000000000000000000000', + }, + { common } + ).sign(dummy.privKey) -tape(`${method}: call with multiple 1559 transactions`, async (t) => { - const { chain, common, execution, server } = await setupChain( - gethGenesisStartLondon(pow), - 'powLondon' - ) - - const maxPriority1 = 10 - const maxPriority2 = 1231231 - const tx1 = FeeMarketEIP1559Transaction.fromTxData( - { - gasLimit: 21000, - maxPriorityFeePerGas: maxPriority1, - maxFeePerGas: 975000000, - to: '0x0000000000000000000000000000000000000000', - }, - { common } - ).sign(dummy.privKey) - const tx2 = FeeMarketEIP1559Transaction.fromTxData( - { - nonce: 1, - gasLimit: 21000, - maxPriorityFeePerGas: maxPriority2, - maxFeePerGas: 975000000, - to: '0x0000000000000000000000000000000000000000', - }, - { common } - ).sign(dummy.privKey) - - await runBlockWithTxs(chain, execution, [tx1, tx2]) - const req = params(method, []) - const averagePriorityFee = BigInt(Math.trunc((maxPriority1 + maxPriority2) / 2)) - const latest = await chain.getCanonicalHeadHeader() - const baseFee = latest.calcNextBaseFee() - const gasPrice = BigInt(baseFee + averagePriorityFee) - 
const expectRes = (res: any) => { - const msg = 'should return the correct gas price with 1 1559 transaction' - t.equal(res.body.result, bigIntToHex(gasPrice), msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + await runBlockWithTxs(chain, execution, [tx]) + const req = params(method, []) + const latest = await chain.getCanonicalHeadHeader() + const baseFee = latest.calcNextBaseFee() + const gasPrice = BigInt(baseFee + tx.maxPriorityFeePerGas) + + const expectRes = (res: any) => { + const msg = 'should return the correct gas price with 1 1559 transaction' + assert.equal(res.body.result, bigIntToHex(gasPrice), msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with multiple 1559 transactions', async () => { + const { chain, common, execution, server } = await setupChain( + gethGenesisStartLondon(pow), + 'powLondon' + ) + + const maxPriority1 = 10 + const maxPriority2 = 1231231 + const tx1 = FeeMarketEIP1559Transaction.fromTxData( + { + gasLimit: 21000, + maxPriorityFeePerGas: maxPriority1, + maxFeePerGas: 975000000, + to: '0x0000000000000000000000000000000000000000', + }, + { common } + ).sign(dummy.privKey) + const tx2 = FeeMarketEIP1559Transaction.fromTxData( + { + nonce: 1, + gasLimit: 21000, + maxPriorityFeePerGas: maxPriority2, + maxFeePerGas: 975000000, + to: '0x0000000000000000000000000000000000000000', + }, + { common } + ).sign(dummy.privKey) -tape(`${method}: compute average gas price for 21 blocks`, async (t) => { - const { chain, common, execution, server } = await setupChain(pow, 'pow') - const iterations = BigInt(21) - const gasPrice = BigInt(20) - const firstBlockGasPrice = BigInt(11111111111111) - let tx: LegacyTransaction - for (let i = 0; i < iterations; i++) { - if (i === 0) { - tx = LegacyTransaction.fromTxData( - { - nonce: i, - gasLimit: 21000, - gasPrice: firstBlockGasPrice, - to: '0x0000000000000000000000000000000000000000', - }, - { common } - ).sign(dummy.privKey) - } else { - tx = 
LegacyTransaction.fromTxData( - { - nonce: i, - gasLimit: 21000, - gasPrice, - to: '0x0000000000000000000000000000000000000000', - }, - { common } - ).sign(dummy.privKey) + await runBlockWithTxs(chain, execution, [tx1, tx2]) + const req = params(method, []) + const averagePriorityFee = BigInt(Math.trunc((maxPriority1 + maxPriority2) / 2)) + const latest = await chain.getCanonicalHeadHeader() + const baseFee = latest.calcNextBaseFee() + const gasPrice = BigInt(baseFee + averagePriorityFee) + const expectRes = (res: any) => { + const msg = 'should return the correct gas price with 1 1559 transaction' + assert.equal(res.body.result, bigIntToHex(gasPrice), msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('compute average gas price for 21 blocks', async () => { + const { chain, common, execution, server } = await setupChain(pow, 'pow') + const iterations = BigInt(21) + const gasPrice = BigInt(20) + const firstBlockGasPrice = BigInt(11111111111111) + let tx: LegacyTransaction + for (let i = 0; i < iterations; i++) { + if (i === 0) { + tx = LegacyTransaction.fromTxData( + { + nonce: i, + gasLimit: 21000, + gasPrice: firstBlockGasPrice, + to: '0x0000000000000000000000000000000000000000', + }, + { common } + ).sign(dummy.privKey) + } else { + tx = LegacyTransaction.fromTxData( + { + nonce: i, + gasLimit: 21000, + gasPrice, + to: '0x0000000000000000000000000000000000000000', + }, + { common } + ).sign(dummy.privKey) + } + await runBlockWithTxs(chain, execution, [tx!]) } - await runBlockWithTxs(chain, execution, [tx!]) - } - const latest = await chain.getCanonicalHeadHeader() - const blockNumber = latest.number + const latest = await chain.getCanonicalHeadHeader() + const blockNumber = latest.number - // Should be block number 21 - t.equal(blockNumber, 21n) + // Should be block number 21 + assert.equal(blockNumber, 21n) - const req = params(method, []) - const expectRes = (res: any) => { - const msg = 'should return the correct gas price for 21 blocks' - 
t.equal(res.body.result, bigIntToHex(gasPrice), msg) - } + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return the correct gas price for 21 blocks' + assert.equal(res.body.result, bigIntToHex(gasPrice), msg) + } - await baseRequest(t, server, req, 200, expectRes) + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getBalance.spec.ts b/packages/client/test/rpc/eth/getBalance.spec.ts index 8e96e39bb0..630dd06dec 100644 --- a/packages/client/test/rpc/eth/getBalance.spec.ts +++ b/packages/client/test/rpc/eth/getBalance.spec.ts @@ -4,7 +4,7 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' import { LegacyTransaction } from '@ethereumjs/tx' import { Address, bigIntToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -14,94 +14,99 @@ import type { FullEthereumService } from '../../../src/service' const method = 'eth_getBalance' -tape(`${method}: ensure balance deducts after a tx`, async (t) => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const blockchain = await Blockchain.create({ common }) - - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - - const server = startRPC(manager.getMethods()) - - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - - // since synchronizer.run() is not executed in the mock setup, - // manually run stateManager.generateCanonicalGenesis() - const genesisState = getGenesis(Chain.Mainnet) - await 
vm.stateManager.generateCanonicalGenesis(genesisState) - - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - // verify balance is genesis amount - const genesisBalance = BigInt(0x15ac56edc4d12c0000) - let req = params(method, [address.toString(), 'latest']) - let expectRes = (res: any) => { - const msg = 'should return the correct genesis balance' - t.equal(res.body.result, bigIntToHex(genesisBalance), msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) - tx.getSenderAddress = () => { - return address - } - const block = Block.fromBlockData({}, { common }) - block.transactions[0] = tx - - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - const { amountSpent } = result.results[0] - - // verify balance is genesis amount minus amountSpent - const expectedNewBalance = genesisBalance - amountSpent - req = params(method, [address.toString(), 'latest']) - expectRes = (res: any) => { - const msg = 'should return the correct balance after a tx' - t.equal(res.body.result, bigIntToHex(expectedNewBalance), msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - // verify we can query with "earliest" - req = params(method, [address.toString(), 'earliest']) - expectRes = (res: any) => { - const msg = "should return the correct balance with 'earliest'" - t.equal(res.body.result, bigIntToHex(genesisBalance), msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - // verify we can query with a past block number - req = params(method, [address.toString(), '0x0']) - expectRes = (res: any) => { - const msg = 'should return the correct balance with a past block number' - t.equal(res.body.result, bigIntToHex(genesisBalance), msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - // call with height 
that exceeds chain height - req = params(method, [address.toString(), '0x1']) - expectRes = checkError(t, INVALID_PARAMS, 'specified block greater than current height') - await baseRequest(t, server, req, 200, expectRes, false) - - // call with nonexistent account - req = params(method, [`0x${'11'.repeat(20)}`, 'latest']) - expectRes = (res: any) => { - const msg = 'should return 0x0 for nonexistent account' - t.equal(res.body.result, `0x0`, msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with unsupported block argument`, async (t) => { - const blockchain = await Blockchain.create() - - const client = createClient({ blockchain, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['0xccfd725760a68823ff1e062f4cc97e1360e8d997', 'pending']) - const expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) -}) +describe( + method, + () => { + it('ensure balance deducts after a tx', async () => { + const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const blockchain = await Blockchain.create({ common }) + + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = createManager(client) + + const server = startRPC(manager.getMethods()) + + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + + // since synchronizer.run() is not executed in the mock setup, + // manually run stateManager.generateCanonicalGenesis() + await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + // verify balance is genesis amount + const genesisBalance = 
BigInt(0x15ac56edc4d12c0000) + let req = params(method, [address.toString(), 'latest']) + let expectRes = (res: any) => { + const msg = 'should return the correct genesis balance' + assert.equal(res.body.result, bigIntToHex(genesisBalance), msg) + } + await baseRequest(server, req, 200, expectRes, false) + + // construct block with tx + const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + const block = Block.fromBlockData({}, { common }) + block.transactions[0] = tx + + const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const { amountSpent } = result.results[0] + + // verify balance is genesis amount minus amountSpent + const expectedNewBalance = genesisBalance - amountSpent + req = params(method, [address.toString(), 'latest']) + expectRes = (res: any) => { + const msg = 'should return the correct balance after a tx' + assert.equal(res.body.result, bigIntToHex(expectedNewBalance), msg) + } + await baseRequest(server, req, 200, expectRes, false) + + // verify we can query with "earliest" + req = params(method, [address.toString(), 'earliest']) + expectRes = (res: any) => { + const msg = "should return the correct balance with 'earliest'" + assert.equal(res.body.result, bigIntToHex(genesisBalance), msg) + } + await baseRequest(server, req, 200, expectRes, false) + + // verify we can query with a past block number + req = params(method, [address.toString(), '0x0']) + expectRes = (res: any) => { + const msg = 'should return the correct balance with a past block number' + assert.equal(res.body.result, bigIntToHex(genesisBalance), msg) + } + await baseRequest(server, req, 200, expectRes, false) + + // call with height that exceeds chain height + req = params(method, [address.toString(), '0x1']) + expectRes = checkError(INVALID_PARAMS, 'specified block greater than current height') + await baseRequest(server, req, 200, expectRes, false) + + // 
call with nonexistent account + req = params(method, [`0x${'11'.repeat(20)}`, 'latest']) + expectRes = (res: any) => { + const msg = 'should return 0x0 for nonexistent account' + assert.equal(res.body.result, `0x0`, msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with unsupported block argument', async () => { + const blockchain = await Blockchain.create() + + const client = createClient({ blockchain, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['0xccfd725760a68823ff1e062f4cc97e1360e8d997', 'pending']) + const expectRes = checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) + }, + 30000 +) diff --git a/packages/client/test/rpc/eth/getBlockByHash.spec.ts b/packages/client/test/rpc/eth/getBlockByHash.spec.ts index 34d4153604..e162556036 100644 --- a/packages/client/test/rpc/eth/getBlockByHash.spec.ts +++ b/packages/client/test/rpc/eth/getBlockByHash.spec.ts @@ -1,4 +1,4 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, baseSetup, params } from '../helpers' @@ -6,75 +6,77 @@ import { checkError } from '../util' const method = 'eth_getBlockByHash' -tape(`${method}: call with valid arguments`, async (t) => { - const { server } = baseSetup() +describe(method, () => { + it('call with valid arguments', async () => { + const { server } = baseSetup() - const blockHash = '0x910abca1728c53e8d6df870dd7af5352e974357dc58205dea1676be17ba6becf' - let includeTransactions = false - let req = params(method, [blockHash, includeTransactions]) - let expectRes = (res: any) => { - t.equal(res.body.result.number, '0x444444', 'should return the correct number') - t.equal(typeof res.body.result.transactions[0], 'string', 'should only include tx hashes') - } - await baseRequest(t, server, req, 200, 
expectRes, false) + const blockHash = '0x910abca1728c53e8d6df870dd7af5352e974357dc58205dea1676be17ba6becf' + let includeTransactions = false + let req = params(method, [blockHash, includeTransactions]) + let expectRes = (res: any) => { + assert.equal(res.body.result.number, '0x444444', 'should return the correct number') + assert.equal( + typeof res.body.result.transactions[0], + 'string', + 'should only include tx hashes' + ) + } + await baseRequest(server, req, 200, expectRes, false) - includeTransactions = true - req = params(method, [blockHash, includeTransactions]) - expectRes = (res: any) => { - t.equal(res.body.result.number, '0x444444', 'should return the correct number') - t.equal(typeof res.body.result.transactions[0], 'object', 'should include tx objects') - } - await baseRequest(t, server, req, 200, expectRes, true) // pass endOnFinish=true for last test -}) + includeTransactions = true + req = params(method, [blockHash, includeTransactions]) + expectRes = (res: any) => { + assert.equal(res.body.result.number, '0x444444', 'should return the correct number') + assert.equal(typeof res.body.result.transactions[0], 'object', 'should include tx objects') + } + await baseRequest(server, req, 200, expectRes, true) // pass endOnFinish=true for last test + }) -tape(`${method}: call with false for second argument`, async (t) => { - const { server } = baseSetup() + it('call with false for second argument', async () => { + const { server } = baseSetup() - const req = params(method, [ - '0xdc0818cf78f21a8e70579cb46a43643f78291264dda342ae31049421c82d21ae', - false, - ]) - const expectRes = (res: any) => { - let msg = 'should return the correct number' - t.equal(res.body.result.number, '0x444444', msg) - msg = 'should return only the hashes of the transactions' - t.equal(typeof res.body.result.transactions[0], 'string', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, [ + 
'0xdc0818cf78f21a8e70579cb46a43643f78291264dda342ae31049421c82d21ae', + false, + ]) + const expectRes = (res: any) => { + let msg = 'should return the correct number' + assert.equal(res.body.result.number, '0x444444', msg) + msg = 'should return only the hashes of the transactions' + assert.equal(typeof res.body.result.transactions[0], 'string', msg) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid block hash without 0x`, async (t) => { - const { server } = baseSetup() + it('call with invalid block hash without 0x', async () => { + const { server } = baseSetup() - const req = params(method, ['WRONG BLOCK NUMBER', true]) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument 0: hex string without 0x prefix' - ) - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, ['WRONG BLOCK NUMBER', true]) + const expectRes = checkError(INVALID_PARAMS, 'invalid argument 0: hex string without 0x prefix') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid hex string as block hash`, async (t) => { - const { server } = baseSetup() + it('call with invalid hex string as block hash', async () => { + const { server } = baseSetup() - const req = params(method, ['0xWRONG BLOCK NUMBER', true]) - const expectRes = checkError(t, INVALID_PARAMS, 'invalid argument 0: invalid block hash') - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, ['0xWRONG BLOCK NUMBER', true]) + const expectRes = checkError(INVALID_PARAMS, 'invalid argument 0: invalid block hash') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call without second parameter`, async (t) => { - const { server } = baseSetup() + it('call without second parameter', async () => { + const { server } = baseSetup() - const req = params(method, ['0x0']) - const expectRes = checkError(t, INVALID_PARAMS, 'missing value for required argument 1') - await 
baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, ['0x0']) + const expectRes = checkError(INVALID_PARAMS, 'missing value for required argument 1') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid second parameter`, async (t) => { - const { server } = baseSetup() + it('call with invalid second parameter', async () => { + const { server } = baseSetup() - const req = params(method, ['0x0', 'INVALID PARAMETER']) - const expectRes = checkError(t, INVALID_PARAMS) - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, ['0x0', 'INVALID PARAMETER']) + const expectRes = checkError(INVALID_PARAMS) + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getBlockByNumber.spec.ts b/packages/client/test/rpc/eth/getBlockByNumber.spec.ts index 91a904f0e5..1a623b315f 100644 --- a/packages/client/test/rpc/eth/getBlockByNumber.spec.ts +++ b/packages/client/test/rpc/eth/getBlockByNumber.spec.ts @@ -1,7 +1,7 @@ import { Block } from '@ethereumjs/block' import { LegacyTransaction } from '@ethereumjs/tx' import { hexToBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, dummy, params, startRPC } from '../helpers' @@ -50,115 +50,120 @@ function createChain() { const method = 'eth_getBlockByNumber' -tape(`${method}: call with valid arguments`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['0x0', false]) - const expectRes = (res: any) => { - const msg = 'should return a valid block' - t.equal(res.body.result.number, '0x0', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with false for second argument`, async (t) => { - const manager 
= createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['0x0', false]) - const expectRes = (res: any) => { - let msg = 'should return a valid block' - t.equal(res.body.result.number, '0x0', msg) - msg = 'should return only the hashes of the transactions' - t.equal(typeof res.body.result.transactions[0], 'string', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with earliest param`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['earliest', false]) - const expectRes = (res: any) => { - const msg = 'should return the genesis block number' - t.equal(res.body.result.number, '0x0', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with latest param`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['latest', false]) - const expectRes = (res: any) => { - const msg = 'should return a block number' - t.equal(res.body.result.number, '0x1', msg) - t.equal(typeof res.body.result.transactions[0], 'string', 'should only include tx hashes') - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with unimplemented pending param`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['pending', true]) - - const expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with non-string block number`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = 
params(method, [10, true]) - const expectRes = checkError(t, INVALID_PARAMS, 'invalid argument 0: argument must be a string') - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with invalid block number`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['WRONG BLOCK NUMBER', true]) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' - ) - - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call without second parameter`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['0x0']) - const expectRes = checkError(t, INVALID_PARAMS, 'missing value for required argument 1') - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with invalid second parameter`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['0x0', 'INVALID PARAMETER']) - const expectRes = checkError(t, INVALID_PARAMS) - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with transaction objects`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) - const req = params(method, ['latest', true]) - - const expectRes = (res: any) => { - t.equal(typeof res.body.result.transactions[0], 'object', 'should include tx objects') - } - await baseRequest(t, server, req, 200, expectRes) +describe(method, async () => { + it('call with valid arguments', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = 
startRPC(manager.getMethods()) + + const req = params(method, ['0x0', false]) + const expectRes = (res: any) => { + const msg = 'should return a valid block' + assert.equal(res.body.result.number, '0x0', msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with false for second argument', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['0x0', false]) + const expectRes = (res: any) => { + let msg = 'should return a valid block' + assert.equal(res.body.result.number, '0x0', msg) + msg = 'should return only the hashes of the transactions' + assert.equal(typeof res.body.result.transactions[0], 'string', msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with earliest param', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['earliest', false]) + const expectRes = (res: any) => { + const msg = 'should return the genesis block number' + assert.equal(res.body.result.number, '0x0', msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with latest param', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['latest', false]) + const expectRes = (res: any) => { + const msg = 'should return a block number' + assert.equal(res.body.result.number, '0x1', msg) + assert.equal( + typeof res.body.result.transactions[0], + 'string', + 'should only include tx hashes' + ) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with unimplemented pending param', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['pending', true]) + + const expectRes 
= checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) + + it('call with non-string block number', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, [10, true]) + const expectRes = checkError(INVALID_PARAMS, 'invalid argument 0: argument must be a string') + await baseRequest(server, req, 200, expectRes) + }) + + it('call with invalid block number', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['WRONG BLOCK NUMBER', true]) + const expectRes = checkError( + INVALID_PARAMS, + 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' + ) + + await baseRequest(server, req, 200, expectRes) + }) + + it('call without second parameter', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['0x0']) + const expectRes = checkError(INVALID_PARAMS, 'missing value for required argument 1') + await baseRequest(server, req, 200, expectRes) + }) + + it('call with invalid second parameter', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['0x0', 'INVALID PARAMETER']) + const expectRes = checkError(INVALID_PARAMS) + await baseRequest(server, req, 200, expectRes) + }) + + it('call with transaction objects', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) + const req = params(method, ['latest', true]) + + const expectRes = (res: any) => { + assert.equal(typeof res.body.result.transactions[0], 'object', 'should include 
tx objects') + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getBlockTransactionCountByHash.spec.ts b/packages/client/test/rpc/eth/getBlockTransactionCountByHash.spec.ts index 9847ae06c0..1ed7343290 100644 --- a/packages/client/test/rpc/eth/getBlockTransactionCountByHash.spec.ts +++ b/packages/client/test/rpc/eth/getBlockTransactionCountByHash.spec.ts @@ -1,4 +1,4 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, baseSetup, params } from '../helpers' @@ -6,49 +6,49 @@ import { checkError } from '../util' const method = 'eth_getBlockTransactionCountByHash' -tape(`${method}: call with valid arguments`, async (t) => { - const { server } = baseSetup() - - const req = params(method, ['0x910abca1728c53e8d6df870dd7af5352e974357dc58205dea1676be17ba6becf']) - const expectRes = (res: any) => { - const msg = 'transaction count should be 1' - t.equal(res.body.result, '0x1', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with invalid block hash without 0x`, async (t) => { - const { server } = baseSetup() - - const req = params(method, ['WRONG BLOCK NUMBER']) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument 0: hex string without 0x prefix' - ) - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with invalid hex string as block hash`, async (t) => { - const { server } = baseSetup() - - const req = params(method, ['0xWRONG BLOCK NUMBER', true]) - const expectRes = checkError(t, INVALID_PARAMS, 'invalid argument 0: invalid block hash') - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call without first parameter`, async (t) => { - const { server } = baseSetup() - - const req = params(method, []) - const expectRes = checkError(t, INVALID_PARAMS, 'missing value for required argument 0') - await 
baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with invalid second parameter`, async (t) => { - const { server } = baseSetup() - - const req = params(method, ['INVALID PARAMETER']) - const expectRes = checkError(t, INVALID_PARAMS) - await baseRequest(t, server, req, 200, expectRes) +describe(method, () => { + it('call with valid arguments', async () => { + const { server } = baseSetup() + + const req = params(method, [ + '0x910abca1728c53e8d6df870dd7af5352e974357dc58205dea1676be17ba6becf', + ]) + const expectRes = (res: any) => { + const msg = 'transaction count should be 1' + assert.equal(res.body.result, '0x1', msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with invalid block hash without 0x', async () => { + const { server } = baseSetup() + + const req = params(method, ['WRONG BLOCK NUMBER']) + const expectRes = checkError(INVALID_PARAMS, 'invalid argument 0: hex string without 0x prefix') + await baseRequest(server, req, 200, expectRes) + }) + + it('call with invalid hex string as block hash', async () => { + const { server } = baseSetup() + + const req = params(method, ['0xWRONG BLOCK NUMBER', true]) + const expectRes = checkError(INVALID_PARAMS, 'invalid argument 0: invalid block hash') + await baseRequest(server, req, 200, expectRes) + }) + + it('call without first parameter', async () => { + const { server } = baseSetup() + + const req = params(method, []) + const expectRes = checkError(INVALID_PARAMS, 'missing value for required argument 0') + await baseRequest(server, req, 200, expectRes) + }) + + it('call with invalid second parameter', async () => { + const { server } = baseSetup() + + const req = params(method, ['INVALID PARAMETER']) + const expectRes = checkError(INVALID_PARAMS) + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts b/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts index 
2141e8f006..54ee35ec82 100644 --- a/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts +++ b/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts @@ -1,9 +1,10 @@ import { Block } from '@ethereumjs/block' import { Blockchain } from '@ethereumjs/blockchain' import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { getGenesis } from '@ethereumjs/genesis' import { LegacyTransaction } from '@ethereumjs/tx' import { Address } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -15,128 +16,148 @@ const method = 'eth_getBlockTransactionCountByNumber' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) -tape(`${method}: call with valid arguments`, async (t) => { - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) - - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - - await vm.stateManager.generateCanonicalGenesis({}) - - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) - tx.getSenderAddress = () => { - return address - } - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit: 2000000, +describe( + method, + () => { + it( + 'call with valid arguments', + async () => { + const 
blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) + + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + + await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + // construct block with tx + const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( + { + header: { + parentHash: parent.hash(), + number: 1, + gasLimit: 2000000, + }, + }, + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = tx + + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await vm.blockchain.putBlock(ranBlock!) + + // verify that the transaction count is 1 + const req = params(method, ['latest']) + const expectRes = (res: any) => { + const msg = 'should return the correct block transaction count(1)' + assert.equal(res.body.result, '0x1', msg) + } + await baseRequest(server, req, 200, expectRes) }, - }, - { common, calcDifficultyFromHeader: parent } - ) - block.transactions[0] = tx - - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - await vm.blockchain.putBlock(ranBlock!) 
- - // verify that the transaction count is 1 - const req = params(method, ['latest']) - const expectRes = (res: any) => { - const msg = 'should return the correct block transaction count(1)' - t.equal(res.body.result, '0x1', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with valid arguments (multiple transactions)`, async (t) => { - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) - - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - - await vm.stateManager.generateCanonicalGenesis({}) - - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) - tx.getSenderAddress = () => { - return address - } - const tx2 = LegacyTransaction.fromTxData({ gasLimit: 53000, nonce: 1 }, { common, freeze: false }) - tx2.getSenderAddress = () => { - return address - } - const tx3 = LegacyTransaction.fromTxData({ gasLimit: 53000, nonce: 2 }, { common, freeze: false }) - tx3.getSenderAddress = () => { - return address - } - - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit: 2000000, + { timeout: 30000 } + ) + + it( + 'call with valid arguments (multiple transactions)', + async () => { + const blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) + + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = 
createManager(client) + const server = startRPC(manager.getMethods()) + + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + + await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + // construct block with tx + const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + const tx2 = LegacyTransaction.fromTxData( + { gasLimit: 53000, nonce: 1 }, + { common, freeze: false } + ) + tx2.getSenderAddress = () => { + return address + } + const tx3 = LegacyTransaction.fromTxData( + { gasLimit: 53000, nonce: 2 }, + { common, freeze: false } + ) + tx3.getSenderAddress = () => { + return address + } + + const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( + { + header: { + parentHash: parent.hash(), + number: 1, + gasLimit: 2000000, + }, + }, + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = tx + block.transactions[1] = tx2 + block.transactions[2] = tx3 + + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await vm.blockchain.putBlock(ranBlock!) 
+ + // verify that the transaction count is 3 + // specify the block number instead of using latest + const req = params(method, ['0x1']) + const expectRes = (res: any) => { + const msg = 'should return the correct block transaction count(3)' + assert.equal(res.body.result, '0x3', msg) + } + await baseRequest(server, req, 200, expectRes) }, - }, - { common, calcDifficultyFromHeader: parent } - ) - block.transactions[0] = tx - block.transactions[1] = tx2 - block.transactions[2] = tx3 - - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - await vm.blockchain.putBlock(ranBlock!) - - // verify that the transaction count is 3 - // specify the block number instead of using latest - const req = params(method, ['0x1']) - const expectRes = (res: any) => { - const msg = 'should return the correct block transaction count(3)' - t.equal(res.body.result, '0x3', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: call with unsupported block argument`, async (t) => { - const blockchain = await Blockchain.create() - - const client = createClient({ blockchain, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const req = params(method, ['pending']) - const expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) -}) + { timeout: 15000 } + ) + + it('call with unsupported block argument', async () => { + const blockchain = await Blockchain.create() + + const client = createClient({ blockchain, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + const req = params(method, ['pending']) + const expectRes = checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) + }, + 
10000 +) diff --git a/packages/client/test/rpc/eth/getCode.spec.ts b/packages/client/test/rpc/eth/getCode.spec.ts index 27d21bd6e0..9ee7664dee 100644 --- a/packages/client/test/rpc/eth/getCode.spec.ts +++ b/packages/client/test/rpc/eth/getCode.spec.ts @@ -4,7 +4,7 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' import { LegacyTransaction } from '@ethereumjs/tx' import { Address } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -16,104 +16,109 @@ const method = 'eth_getCode' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) -tape(`${method}: call with valid arguments`, async (t) => { - const blockchain = await Blockchain.create({ common }) - - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - const genesisState = getGenesis(Chain.Mainnet) - await vm.stateManager.generateCanonicalGenesis(genesisState) - - // genesis address - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - // verify code is null - const req = params(method, [address.toString(), 'latest']) - const expectRes = (res: any) => { - const msg = 'should return the correct code' - t.equal(res.body.result, '0x', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: ensure returns correct code`, async (t) => { - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) - - const client = 
createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - // sample contract from https://ethereum.stackexchange.com/a/70791 - const data = - '0x608060405234801561001057600080fd5b506040516020806100ef8339810180604052602081101561003057600080fd5b810190808051906020019092919050505080600081905550506098806100576000396000f3fe6080604052600436106039576000357c010000000000000000000000000000000000000000000000000000000090048063a2a9679914603e575b600080fd5b348015604957600080fd5b5060506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820fe2ba3506418c87a075f8f3ae19bc636bd4c18ebde0644bcb45199379603a72c00290000000000000000000000000000000000000000000000000000000000000064' - const code = - '0x6080604052600436106039576000357c010000000000000000000000000000000000000000000000000000000090048063a2a9679914603e575b600080fd5b348015604957600080fd5b5060506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820fe2ba3506418c87a075f8f3ae19bc636bd4c18ebde0644bcb45199379603a72c0029' - - // construct block with tx - const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) - tx.getSenderAddress = () => { - return address - } - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit, - }, +describe(method, () => { + it( + 'call with valid arguments', + async () => { + const blockchain = await Blockchain.create({ common }) + + const client = createClient({ blockchain, commonChain: common, 
includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + + // genesis address + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + // verify code is null + const req = params(method, [address.toString(), 'latest']) + const expectRes = (res: any) => { + const msg = 'should return the correct code' + assert.equal(res.body.result, '0x', msg) + } + await baseRequest(server, req, 200, expectRes) }, - { common, calcDifficultyFromHeader: parent } - ) - block.transactions[0] = tx - - // deploy contract - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - const { createdAddress } = result.results[0] - await vm.blockchain.putBlock(ranBlock!) 
- - const expectedContractAddress = Address.generate(address, BigInt(0)) - t.ok( - createdAddress!.equals(expectedContractAddress), - 'should match the expected contract address' + { timeout: 30000 } ) - // verify contract has code - const req = params(method, [expectedContractAddress.toString(), 'latest']) - const expectRes = (res: any) => { - const msg = 'should return the correct code' - t.equal(res.body.result, code, msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + it('ensure returns correct code', async () => { + const blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) + + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + // sample contract from https://ethereum.stackexchange.com/a/70791 + const data = + '0x608060405234801561001057600080fd5b506040516020806100ef8339810180604052602081101561003057600080fd5b810190808051906020019092919050505080600081905550506098806100576000396000f3fe6080604052600436106039576000357c010000000000000000000000000000000000000000000000000000000090048063a2a9679914603e575b600080fd5b348015604957600080fd5b5060506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820fe2ba3506418c87a075f8f3ae19bc636bd4c18ebde0644bcb45199379603a72c00290000000000000000000000000000000000000000000000000000000000000064' + const code = + 
'0x6080604052600436106039576000357c010000000000000000000000000000000000000000000000000000000090048063a2a9679914603e575b600080fd5b348015604957600080fd5b5060506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820fe2ba3506418c87a075f8f3ae19bc636bd4c18ebde0644bcb45199379603a72c0029' + + // construct block with tx + const gasLimit = 2000000 + const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( + { + header: { + parentHash: parent.hash(), + number: 1, + gasLimit, + }, + }, + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = tx + + // deploy contract + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const { createdAddress } = result.results[0] + await vm.blockchain.putBlock(ranBlock!) 
+ + const expectedContractAddress = Address.generate(address, BigInt(0)) + assert.ok( + createdAddress!.equals(expectedContractAddress), + 'should match the expected contract address' + ) + + // verify contract has code + const req = params(method, [expectedContractAddress.toString(), 'latest']) + const expectRes = (res: any) => { + const msg = 'should return the correct code' + assert.equal(res.body.result, code, msg) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with unsupported block argument`, async (t) => { - const blockchain = await Blockchain.create() + it('call with unsupported block argument', async () => { + const blockchain = await Blockchain.create() - const client = createClient({ blockchain, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) + const client = createClient({ blockchain, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) - const req = params(method, ['0xccfd725760a68823ff1e062f4cc97e1360e8d997', 'pending']) - const expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, ['0xccfd725760a68823ff1e062f4cc97e1360e8d997', 'pending']) + const expectRes = checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getLogs.spec.ts b/packages/client/test/rpc/eth/getLogs.spec.ts index e64f4c71ed..fa25e82649 100644 --- a/packages/client/test/rpc/eth/getLogs.spec.ts +++ b/packages/client/test/rpc/eth/getLogs.spec.ts @@ -1,9 +1,9 @@ import { LegacyTransaction } from '@ethereumjs/tx' import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as pow from 
'../../testdata/geth-genesis/pow.json' +import pow from '../../testdata/geth-genesis/pow.json' import { baseRequest, dummy, params, runBlockWithTxs, setupChain } from '../helpers' import { checkError } from '../util' @@ -27,251 +27,251 @@ const logExampleBytecode = hexToBytes( '0x608060405234801561001057600080fd5b50610257806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063aefb4f0a1461004d575b600080fd5b610067600480360381019061006291906100de565b610069565b005b60005b858110156100c1578284867fbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb856040516100a69190610168565b60405180910390a480806100b99061018d565b91505061006c565b505050505050565b6000813590506100d88161020a565b92915050565b600080600080600060a086880312156100fa576100f9610205565b5b6000610108888289016100c9565b9550506020610119888289016100c9565b945050604061012a888289016100c9565b935050606061013b888289016100c9565b925050608061014c888289016100c9565b9150509295509295909350565b61016281610183565b82525050565b600060208201905061017d6000830184610159565b92915050565b6000819050919050565b600061019882610183565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8214156101cb576101ca6101d6565b5b600182019050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b600080fd5b61021381610183565b811461021e57600080fd5b5056fea2646970667358221220b98f45f4d4112e71fd287ab0ce7cc1872e53b463eb0abf1182b892192d3d8a1d64736f6c63430008070033' ) -tape(`${method}: call with valid arguments`, async (t) => { - const { chain, common, execution, server } = await setupChain(pow, 'pow') +describe(method, async () => { + it('call with valid arguments', async () => { + const { chain, common, execution, server } = await setupChain(pow, 'pow') - // deploy contracts at two different addresses - const txData = { gasLimit: 2000000, gasPrice: 100 } - const tx1 = LegacyTransaction.fromTxData( - { - 
...txData, - data: logExampleBytecode, - nonce: 0, - }, - { common } - ).sign(dummy.privKey) - const tx2 = LegacyTransaction.fromTxData( - { - ...txData, - data: logExampleBytecode, - nonce: 1, - }, - { common } - ).sign(dummy.privKey) + // deploy contracts at two different addresses + const txData = { gasLimit: 2000000, gasPrice: 100 } + const tx1 = LegacyTransaction.fromTxData( + { + ...txData, + data: logExampleBytecode, + nonce: 0, + }, + { common } + ).sign(dummy.privKey) + const tx2 = LegacyTransaction.fromTxData( + { + ...txData, + data: logExampleBytecode, + nonce: 1, + }, + { common } + ).sign(dummy.privKey) - const contractAddr1 = Address.generate(dummy.addr, BigInt(0)) - const contractAddr2 = Address.generate(dummy.addr, BigInt(1)) - // construct txs to emit the logs - // data calls log(logCount: 10, num1: 1, num2: 2, num3: 3, num4: 4) - const data = hexToBytes( - '0xaefb4f0a000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000004' - ) - const tx3 = LegacyTransaction.fromTxData( - { - ...txData, - data, - to: contractAddr1, - nonce: 2, - }, - { common } - ).sign(dummy.privKey) - const tx4 = LegacyTransaction.fromTxData( - { - ...txData, - data, - to: contractAddr2, - nonce: 3, - }, - { common } - ).sign(dummy.privKey) + const contractAddr1 = Address.generate(dummy.addr, BigInt(0)) + const contractAddr2 = Address.generate(dummy.addr, BigInt(1)) + // construct txs to emit the logs + // data calls log(logCount: 10, num1: 1, num2: 2, num3: 3, num4: 4) + const data = hexToBytes( + 
'0xaefb4f0a000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000004' + ) + const tx3 = LegacyTransaction.fromTxData( + { + ...txData, + data, + to: contractAddr1, + nonce: 2, + }, + { common } + ).sign(dummy.privKey) + const tx4 = LegacyTransaction.fromTxData( + { + ...txData, + data, + to: contractAddr2, + nonce: 3, + }, + { common } + ).sign(dummy.privKey) - await runBlockWithTxs(chain, execution, [tx1, tx2, tx3, tx4]) + await runBlockWithTxs(chain, execution, [tx1, tx2, tx3, tx4]) - // compare the logs - let req = params(method, [{ fromBlock: 'earliest', toBlock: 'latest' }]) - let expectRes = (res: any) => { - const msg = `should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')` - if ( - res.body.result.length === 20 && - res.body.result[0].address === contractAddr1.toString() && - res.body.result[10].address === contractAddr2.toString() && - res.body.result[0].topics[0] === - '0xbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb' && - res.body.result[0].topics[1] === - '0x0000000000000000000000000000000000000000000000000000000000000001' && - res.body.result[0].topics[2] === - '0x0000000000000000000000000000000000000000000000000000000000000002' && - res.body.result[0].topics[3] === - '0x0000000000000000000000000000000000000000000000000000000000000003' - ) { - t.pass(msg) - } else { - t.fail(msg) + // compare the logs + let req = params(method, [{ fromBlock: 'earliest', toBlock: 'latest' }]) + let expectRes = (res: any) => { + const msg = `should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')` + if ( + res.body.result.length === 20 && + res.body.result[0].address === contractAddr1.toString() && + res.body.result[10].address === 
contractAddr2.toString() && + res.body.result[0].topics[0] === + '0xbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb' && + res.body.result[0].topics[1] === + '0x0000000000000000000000000000000000000000000000000000000000000001' && + res.body.result[0].topics[2] === + '0x0000000000000000000000000000000000000000000000000000000000000002' && + res.body.result[0].topics[3] === + '0x0000000000000000000000000000000000000000000000000000000000000003' + ) { + assert.ok(true, msg) + } else { + assert.fail(msg) + } } - } - await baseRequest(t, server, req, 200, expectRes, false) + await baseRequest(server, req, 200, expectRes, false) - // get the logs using fromBlock/toBlock as numbers - req = params(method, [{ fromBlock: '0x0', toBlock: '0x1' }]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (fromBlock/toBlock as block numbers)' - t.equal(res.body.result.length, 20, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // get the logs using fromBlock/toBlock as numbers + req = params(method, [{ fromBlock: '0x0', toBlock: '0x1' }]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (fromBlock/toBlock as block numbers)' + assert.equal(res.body.result.length, 20, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // test filtering by single address - req = params(method, [{ address: contractAddr1.toString() }]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by single address)' - if ( - res.body.result.length === 10 && - res.body.result.every((r: any) => r.address === contractAddr1.toString()) === true - ) { - t.pass(msg) - } else { - t.fail(msg) + // test filtering by single address + req = params(method, [{ address: contractAddr1.toString() }]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by single address)' + if ( + res.body.result.length === 10 && + res.body.result.every((r: any) => r.address === 
contractAddr1.toString()) === true + ) { + assert.ok(true, msg) + } else { + assert.fail(msg) + } } - } - await baseRequest(t, server, req, 200, expectRes, false) + await baseRequest(server, req, 200, expectRes, false) - // test filtering by multiple addresses - const addresses = [contractAddr1.toString(), contractAddr2.toString()] - req = params(method, [{ address: addresses }]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by multiple addresses)' - if ( - res.body.result.length === 20 && - res.body.result.every((r: any) => addresses.includes(r.address)) === true - ) { - t.pass(msg) - } else { - t.fail(msg) + // test filtering by multiple addresses + const addresses = [contractAddr1.toString(), contractAddr2.toString()] + req = params(method, [{ address: addresses }]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by multiple addresses)' + if ( + res.body.result.length === 20 && + res.body.result.every((r: any) => addresses.includes(r.address)) === true + ) { + assert.ok(true, msg) + } else { + assert.fail(msg) + } } - } - await baseRequest(t, server, req, 200, expectRes, false) + await baseRequest(server, req, 200, expectRes, false) - // test filtering by topics (empty means anything) - req = params(method, [{ topics: [] }]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by topic - empty means anything)' - t.equal(res.body.result.length, 20, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // test filtering by topics (empty means anything) + req = params(method, [{ topics: [] }]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by topic - empty means anything)' + assert.equal(res.body.result.length, 20, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // test filtering by topics (exact match) - req = params(method, [ - { topics: 
['0xbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb'] }, - ]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by topic - exact match)' - t.equal(res.body.result.length, 20, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // test filtering by topics (exact match) + req = params(method, [ + { topics: ['0xbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb'] }, + ]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by topic - exact match)' + assert.equal(res.body.result.length, 20, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // test filtering by topics (exact match for second topic) - req = params(method, [ - { topics: [null, '0x0000000000000000000000000000000000000000000000000000000000000001'] }, - ]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by topic - exact match for second topic)' - t.equal(res.body.result.length, 20, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // test filtering by topics (exact match for second topic) + req = params(method, [ + { topics: [null, '0x0000000000000000000000000000000000000000000000000000000000000001'] }, + ]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by topic - exact match for second topic)' + assert.equal(res.body.result.length, 20, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // test filtering by topics (A or B in first position) - req = params(method, [ - { - topics: [ - [ - '0xbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb', - '0x0000000000000000000000000000000000000000000000000000000000000001', + // test filtering by topics (A or B in first position) + req = params(method, [ + { + topics: [ + [ + '0xbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb', + '0x0000000000000000000000000000000000000000000000000000000000000001', + ], 
+ null, + '0x0000000000000000000000000000000000000000000000000000000000000002', ], - null, - '0x0000000000000000000000000000000000000000000000000000000000000002', - ], - }, - ]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by topic - A or B in first position)' - t.equal(res.body.result.length, 20, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + }, + ]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by topic - A or B in first position)' + assert.equal(res.body.result.length, 20, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // test filtering by topics (null means anything) - req = params(method, [ - { - topics: [null, null, '0x0000000000000000000000000000000000000000000000000000000000000002'], - }, - ]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by topic - null means anything)' - t.equal(res.body.result.length, 20, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // test filtering by topics (null means anything) + req = params(method, [ + { + topics: [null, null, '0x0000000000000000000000000000000000000000000000000000000000000002'], + }, + ]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by topic - null means anything)' + assert.equal(res.body.result.length, 20, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // test filtering by blockHash - const latestHeader = chain.headers.latest! - req = params(method, [ - { - blockHash: bytesToHex(latestHeader.hash()), - }, - ]) - expectRes = (res: any) => { - const msg = 'should return the correct logs (filter by blockHash)' - t.equal(res.body.result.length, 20, msg) - } - await baseRequest(t, server, req, 200, expectRes, true) // pass endOnFinish=true for last test -}) + // test filtering by blockHash + const latestHeader = chain.headers.latest! 
+ req = params(method, [ + { + blockHash: bytesToHex(latestHeader.hash()), + }, + ]) + expectRes = (res: any) => { + const msg = 'should return the correct logs (filter by blockHash)' + assert.equal(res.body.result.length, 20, msg) + } + await baseRequest(server, req, 200, expectRes, true) // pass endOnFinish=true for last test + }) -tape(`${method}: call with invalid params`, async (t) => { - const { server } = await setupChain(pow, 'pow') + it('call with invalid params', async () => { + const { server } = await setupChain(pow, 'pow') - // fromBlock greater than current height - let req = params(method, [{ fromBlock: '0x1234' }]) - let expectRes = checkError(t, INVALID_PARAMS, 'specified `fromBlock` greater than current height') - await baseRequest(t, server, req, 200, expectRes, false) + // fromBlock greater than current height + let req = params(method, [{ fromBlock: '0x1234' }]) + let expectRes = checkError(INVALID_PARAMS, 'specified `fromBlock` greater than current height') + await baseRequest(server, req, 200, expectRes, false) - // toBlock greater than current height - req = params(method, [{ toBlock: '0x1234' }]) - expectRes = checkError(t, INVALID_PARAMS, 'specified `toBlock` greater than current height') - await baseRequest(t, server, req, 200, expectRes, false) + // toBlock greater than current height + req = params(method, [{ toBlock: '0x1234' }]) + expectRes = checkError(INVALID_PARAMS, 'specified `toBlock` greater than current height') + await baseRequest(server, req, 200, expectRes, false) - // unknown blockHash - req = params(method, [ - { blockHash: '0x1000000000000000000000000000000000000000000000000000000000000001' }, - ]) - expectRes = checkError(t, INVALID_PARAMS, 'unknown blockHash') - await baseRequest(t, server, req, 200, expectRes, false) + // unknown blockHash + req = params(method, [ + { blockHash: '0x1000000000000000000000000000000000000000000000000000000000000001' }, + ]) + expectRes = checkError(INVALID_PARAMS, 'unknown blockHash') + 
await baseRequest(server, req, 200, expectRes, false) - // specifying fromBlock or toBlock with blockHash - req = params(method, [ - { - fromBlock: 'latest', - blockHash: '0x1000000000000000000000000000000000000000000000000000000000000001', - }, - ]) - expectRes = checkError( - t, - INVALID_PARAMS, - 'Can only specify a blockHash if fromBlock or toBlock are not provided' - ) - await baseRequest(t, server, req, 200, expectRes, false) - req = params(method, [ - { - toBlock: 'latest', - blockHash: '0x1000000000000000000000000000000000000000000000000000000000000001', - }, - ]) - expectRes = checkError( - t, - INVALID_PARAMS, - 'Can only specify a blockHash if fromBlock or toBlock are not provided' - ) - await baseRequest(t, server, req, 200, expectRes, false) + // specifying fromBlock or toBlock with blockHash + req = params(method, [ + { + fromBlock: 'latest', + blockHash: '0x1000000000000000000000000000000000000000000000000000000000000001', + }, + ]) + expectRes = checkError( + INVALID_PARAMS, + 'Can only specify a blockHash if fromBlock or toBlock are not provided' + ) + await baseRequest(server, req, 200, expectRes, false) + req = params(method, [ + { + toBlock: 'latest', + blockHash: '0x1000000000000000000000000000000000000000000000000000000000000001', + }, + ]) + expectRes = checkError( + INVALID_PARAMS, + 'Can only specify a blockHash if fromBlock or toBlock are not provided' + ) + await baseRequest(server, req, 200, expectRes, false) - // unknown address - req = params(method, [{ address: '0x0000000000000000000000000000000000000001' }]) - expectRes = (res: any) => { - const msg = 'should return empty logs' - t.equal(res.body.result.length, 0, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // unknown address + req = params(method, [{ address: '0x0000000000000000000000000000000000000001' }]) + expectRes = (res: any) => { + const msg = 'should return empty logs' + assert.equal(res.body.result.length, 0, msg) + } + await baseRequest(server, 
req, 200, expectRes, false) - // invalid topic - req = params(method, [{ topics: ['0x1234'] }]) - expectRes = (res: any) => { - const msg = 'should return empty logs' - t.equal(res.body.result.length, 0, msg) - } - await baseRequest(t, server, req, 200, expectRes, true) // pass endOnFinish=true for last test + // invalid topic + req = params(method, [{ topics: ['0x1234'] }]) + expectRes = (res: any) => { + const msg = 'should return empty logs' + assert.equal(res.body.result.length, 0, msg) + } + await baseRequest(server, req, 200, expectRes, true) // pass endOnFinish=true for last test + }) }) diff --git a/packages/client/test/rpc/eth/getProof.spec.ts b/packages/client/test/rpc/eth/getProof.spec.ts index 71a7f3d3f0..25b9fd7930 100644 --- a/packages/client/test/rpc/eth/getProof.spec.ts +++ b/packages/client/test/rpc/eth/getProof.spec.ts @@ -3,7 +3,7 @@ import { Blockchain } from '@ethereumjs/blockchain' import { Chain, Common, Hardfork } from '@ethereumjs/common' import { LegacyTransaction } from '@ethereumjs/tx' import { Address, bigIntToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -35,26 +35,27 @@ const expectedProof = { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) -tape(`${method}: call with valid arguments`, async (t) => { - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) +describe(method, async () => { + it('call with valid arguments', async () => { + const blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) + const client = createClient({ blockchain, commonChain: common, includeVM: true 
}) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution - // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + // genesis address with balance + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - // contract inspired from https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getstorageat/ - /* + // contract inspired from https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getstorageat/ + /* // SPDX-License-Identifier: MIT pragma solidity ^0.7.4; @@ -67,71 +68,72 @@ tape(`${method}: call with valid arguments`, async (t) => { } } */ - const data = - '0x6080604052348015600f57600080fd5b5060bc8061001e6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c8063975057e714602d575b600080fd5b60336035565b005b6104d260008190555061162e600160003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000208190555056fea2646970667358221220b16fe0abdbdcae31fa05c5717ebc442024b20fb637907d1a05547ea2d8ec8e5964736f6c63430007060033' - - // construct block with tx - const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) - tx.getSenderAddress = () => { - return address - } - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit, + const data = + 
'0x6080604052348015600f57600080fd5b5060bc8061001e6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c8063975057e714602d575b600080fd5b60336035565b005b6104d260008190555061162e600160003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000208190555056fea2646970667358221220b16fe0abdbdcae31fa05c5717ebc442024b20fb637907d1a05547ea2d8ec8e5964736f6c63430007060033' + + // construct block with tx + const gasLimit = 2000000 + const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( + { + header: { + parentHash: parent.hash(), + number: 1, + gasLimit, + }, }, - }, - { common, calcDifficultyFromHeader: parent } - ) - block.transactions[0] = tx - - // deploy contract - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - const { createdAddress } = result.results[0] - await vm.blockchain.putBlock(ranBlock!) 
- - // call store() method - const funcHash = '975057e7' // store() - const storeTxData = { - to: createdAddress!.toString(), - from: address.toString(), - data: `0x${funcHash}`, - gasLimit: bigIntToHex(BigInt(530000)), - nonce: 1, - } - const storeTx = LegacyTransaction.fromTxData(storeTxData, { common, freeze: false }) - storeTx.getSenderAddress = () => { - return address - } - const block2 = Block.fromBlockData( - { - header: { - parentHash: ranBlock!.hash(), - number: 2, - gasLimit, + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = tx + + // deploy contract + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const { createdAddress } = result.results[0] + await vm.blockchain.putBlock(ranBlock!) + + // call store() method + const funcHash = '975057e7' // store() + const storeTxData = { + to: createdAddress!.toString(), + from: address.toString(), + data: `0x${funcHash}`, + gasLimit: bigIntToHex(BigInt(530000)), + nonce: 1, + } + const storeTx = LegacyTransaction.fromTxData(storeTxData, { common, freeze: false }) + storeTx.getSenderAddress = () => { + return address + } + const block2 = Block.fromBlockData( + { + header: { + parentHash: ranBlock!.hash(), + number: 2, + gasLimit, + }, }, - }, - { common, calcDifficultyFromHeader: block.header } - ) - block2.transactions[0] = storeTx - - // run block - let ranBlock2: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock2 = result.block)) - await vm.runBlock({ block: block2, generate: true, skipBlockValidation: true }) - await vm.blockchain.putBlock(ranBlock2!) 
- - // verify proof is accurate - const req = params(method, [createdAddress!.toString(), ['0x0'], 'latest']) - const expectRes = (res: any) => { - const msg = 'should return the correct proof' - t.deepEqual(res.body.result, expectedProof, msg) - } - await baseRequest(t, server, req, 200, expectRes) + { common, calcDifficultyFromHeader: block.header } + ) + block2.transactions[0] = storeTx + + // run block + let ranBlock2: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock2 = result.block)) + await vm.runBlock({ block: block2, generate: true, skipBlockValidation: true }) + await vm.blockchain.putBlock(ranBlock2!) + + // verify proof is accurate + const req = params(method, [createdAddress!.toString(), ['0x0'], 'latest']) + const expectRes = (res: any) => { + const msg = 'should return the correct proof' + assert.deepEqual(res.body.result, expectedProof, msg) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getStorageAt.spec.ts b/packages/client/test/rpc/eth/getStorageAt.spec.ts index 45d5aa33e0..8b6c33e2b6 100644 --- a/packages/client/test/rpc/eth/getStorageAt.spec.ts +++ b/packages/client/test/rpc/eth/getStorageAt.spec.ts @@ -1,90 +1,88 @@ import { Block } from '@ethereumjs/block' import { LegacyTransaction } from '@ethereumjs/tx' import { Address } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' -import * as pow from '../../testdata/geth-genesis/pow.json' +import pow from '../../testdata/geth-genesis/pow.json' import { baseRequest, params, setupChain } from '../helpers' import { checkError } from '../util' const method = 'eth_getStorageAt' -tape(`${method}: call with valid arguments`, async (t) => { - const address = Address.fromString(`0x${'11'.repeat(20)}`) - const emptySlotStr = `0x${'00'.repeat(32)}` +describe(method, async () => { + it('call with valid 
arguments', async () => { + const address = Address.fromString(`0x${'11'.repeat(20)}`) + const emptySlotStr = `0x${'00'.repeat(32)}` - const { execution, common, server, chain } = await setupChain(pow, 'pow') + const { execution, common, server, chain } = await setupChain(pow, 'pow') - let req = params(method, [address.toString(), '0x0', 'latest']) - let expectRes = (res: any) => { - const msg = 'should return the empty slot for nonexistent account' - t.equal(res.body.result, emptySlotStr, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + let req = params(method, [address.toString(), '0x0', 'latest']) + let expectRes = (res: any) => { + const msg = 'should return the empty slot for nonexistent account' + assert.equal(res.body.result, emptySlotStr, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // easy sample init contract: - // push 00 NOT push 00 sstore - // this stores NOT(0) (=0xfffff..ff) into slot 0 - // Note: previously a contract was initialized which stored 0x64 (100) here - // However, 0x64 in RLP is still 0x64. 
- // This new storage tests that higher RLP values than 0x80 also get returned correctly - const data = '0x600019600055' - const expectedSlotValue = '0x' + 'ff'.repeat(32) + // sample contract from https://ethereum.stackexchange.com/a/70791 + const data = + '0x608060405234801561001057600080fd5b506040516020806100ef8339810180604052602081101561003057600080fd5b810190808051906020019092919050505080600081905550506098806100576000396000f3fe6080604052600436106039576000357c010000000000000000000000000000000000000000000000000000000090048063a2a9679914603e575b600080fd5b348015604957600080fd5b5060506066565b6040518082815260200191505060405180910390f35b6000548156fea165627a7a72305820fe2ba3506418c87a075f8f3ae19bc636bd4c18ebde0644bcb45199379603a72c00290000000000000000000000000000000000000000000000000000000000000064' + const expectedSlotValue = `0x${data.slice(data.length - 64)}` - // construct block with tx - const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) - const signedTx = tx.sign(tx.getHashedMessageToSign()) + // construct block with tx + const gasLimit = 2000000 + const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + const signedTx = tx.sign(tx.getHashedMessageToSign()) - const parent = await chain.blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit, + const parent = await chain.blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( + { + header: { + parentHash: parent.hash(), + number: 1, + gasLimit, + }, }, - }, - { common, calcDifficultyFromHeader: parent } - ) - block.transactions[0] = signedTx + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = signedTx - // deploy contract - let ranBlock: Block | undefined = undefined - execution.vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await execution.vm.runBlock({ 
block, generate: true, skipBlockValidation: true }) - const { createdAddress } = result.results[0] - await chain.putBlocks([ranBlock as unknown as Block]) + // deploy contract + let ranBlock: Block | undefined = undefined + execution.vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + const result = await execution.vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const { createdAddress } = result.results[0] + await chain.putBlocks([ranBlock as unknown as Block]) - // call with 'latest tag to see if account storage reflects newly put storage value - req = params(method, [createdAddress!.toString(), '0x0', 'latest']) - expectRes = (res: any) => { - const msg = 'should return the correct slot value' - t.equal(res.body.result, expectedSlotValue, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // call with 'latest tag to see if account storage reflects newly put storage value + req = params(method, [createdAddress!.toString(), '0x0', 'latest']) + expectRes = (res: any) => { + const msg = 'should return the correct slot value' + assert.equal(res.body.result, expectedSlotValue, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // call with 'earliest' tag to see if getStorageAt allows addressing blocks that are older than the latest block by tag - req = params(method, [createdAddress!.toString(), '0x0', 'earliest']) - expectRes = (res: any) => { - const msg = - 'should not have new slot value for block that is addressed by "earliest" tag and is older than latest' - t.equal(res.body.result, emptySlotStr, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // call with 'earliest' tag to see if getStorageAt allows addressing blocks that are older than the latest block by tag + req = params(method, [createdAddress!.toString(), '0x0', 'earliest']) + expectRes = (res: any) => { + const msg = + 'should not have new slot value for block that is addressed by "earliest" tag and is 
older than latest' + assert.equal(res.body.result, emptySlotStr, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // call with integer for block number to see if getStorageAt allows addressing blocks by number index - req = params(method, [createdAddress!.toString(), '0x0', '0x1']) - expectRes = (res: any) => { - const msg = - 'should return the correct slot value when addressing the latest block by integer index' - t.equal(res.body.result, expectedSlotValue, msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // call with integer for block number to see if getStorageAt allows addressing blocks by number index + req = params(method, [createdAddress!.toString(), '0x0', '0x1']) + expectRes = (res: any) => { + const msg = + 'should return the correct slot value when addressing the latest block by integer index' + assert.equal(res.body.result, expectedSlotValue, msg) + } + await baseRequest(server, req, 200, expectRes, false) - // call with unsupported block argument - req = params(method, [address.toString(), '0x0', 'pending']) - expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) + // call with unsupported block argument + req = params(method, [address.toString(), '0x0', 'pending']) + expectRes = checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts b/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts index c00f49db02..f57cccd5a9 100644 --- a/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts @@ -1,8 +1,8 @@ import { LegacyTransaction } from '@ethereumjs/tx' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' 
-import * as pow from '../../testdata/geth-genesis/pow.json' +import pow from '../../testdata/geth-genesis/pow.json' import { baseRequest, baseSetup, dummy, params, runBlockWithTxs, setupChain } from '../helpers' import { checkError } from '../util' @@ -32,87 +32,81 @@ async function setUp() { return { server } } -tape(`${method}: call with valid arguments`, async (t) => { - const { server } = await setUp() +describe(method, async () => { + it('call with valid arguments', async () => { + const { server } = await setUp() - const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' - const mockTxHash = '0x13548b649129ad9beb57467a819d24b846fa0aa02a955f6e974541e1ebb8b02c' - const mockTxIndex = '0x1' + const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' + const mockTxHash = '0x13548b649129ad9beb57467a819d24b846fa0aa02a955f6e974541e1ebb8b02c' + const mockTxIndex = '0x1' - const req = params(method, [mockBlockHash, mockTxIndex]) - const expectRes = (res: any) => { - t.equal(res.body.result.hash, mockTxHash, 'should return the correct tx hash') - } - await baseRequest(t, server, req, 200, expectRes, false) -}) + const req = params(method, [mockBlockHash, mockTxIndex]) + const expectRes = (res: any) => { + assert.equal(res.body.result.hash, mockTxHash, 'should return the correct tx hash') + } + await baseRequest(server, req, 200, expectRes, false) + }) -tape(`${method}: call with no argument`, async (t) => { - const { server } = baseSetup() + it('call with no argument', async () => { + const { server } = baseSetup() - const req = params(method, []) - const expectRes = checkError(t, INVALID_PARAMS, 'missing value for required argument 0') - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, []) + const expectRes = checkError(INVALID_PARAMS, 'missing value for required argument 0') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with unknown 
block hash`, async (t) => { - const { server } = await setupChain(pow, 'pow') + it('call with unknown block hash', async () => { + const { server } = await setupChain(pow, 'pow') - const mockBlockHash = '0x89ea5b54111befb936851660a72b686a21bc2fc4889a9a308196ff99d08925a0' - const mockTxIndex = '0x1' + const mockBlockHash = '0x89ea5b54111befb936851660a72b686a21bc2fc4889a9a308196ff99d08925a0' + const mockTxIndex = '0x1' - const req = params(method, [mockBlockHash, mockTxIndex]) - const expectRes = checkError(t, INVALID_PARAMS, 'not found in DB') - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, [mockBlockHash, mockTxIndex]) + const expectRes = checkError(INVALID_PARAMS, 'not found in DB') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid block hash`, async (t) => { - const { server } = baseSetup() + it('call with invalid block hash', async () => { + const { server } = baseSetup() - const mockBlockHash = 'INVALID_BLOCKHASH' - const mockTxIndex = '0x1' + const mockBlockHash = 'INVALID_BLOCKHASH' + const mockTxIndex = '0x1' - const req = params(method, [mockBlockHash, mockTxIndex]) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument 0: hex string without 0x prefix' - ) - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, [mockBlockHash, mockTxIndex]) + const expectRes = checkError(INVALID_PARAMS, 'invalid argument 0: hex string without 0x prefix') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call without tx hash`, async (t) => { - const { server } = baseSetup() + it('call without tx hash', async () => { + const { server } = baseSetup() - const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' + const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' - const req = params(method, [mockBlockHash]) - const expectRes = checkError(t, INVALID_PARAMS, 
'missing value for required argument 1') - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, [mockBlockHash]) + const expectRes = checkError(INVALID_PARAMS, 'missing value for required argument 1') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid tx hash`, async (t) => { - const { server } = baseSetup() + it('call with invalid tx hash', async () => { + const { server } = baseSetup() - const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' - const mockTxIndex = 'INVALIDA_TXINDEX' - const req = params(method, [mockBlockHash, mockTxIndex]) + const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' + const mockTxIndex = 'INVALIDA_TXINDEX' + const req = params(method, [mockBlockHash, mockTxIndex]) - const expectRes = checkError( - t, - INVALID_PARAMS, - 'invalid argument 1: hex string without 0x prefix' - ) - await baseRequest(t, server, req, 200, expectRes) -}) + const expectRes = checkError(INVALID_PARAMS, 'invalid argument 1: hex string without 0x prefix') + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with out-of-bound tx hash `, async (t) => { - const { server } = baseSetup() + it('call with out-of-bound tx hash ', async () => { + const { server } = baseSetup() - const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' - const mockTxIndex = '0x10' - const req = params(method, [mockBlockHash, mockTxIndex]) - const expectRes = (res: any) => { - t.equal(res.body.result, null, 'should return null') - } - await baseRequest(t, server, req, 200, expectRes) + const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' + const mockTxIndex = '0x10' + const req = params(method, [mockBlockHash, mockTxIndex]) + const expectRes = (res: any) => { + assert.equal(res.body.result, null, 'should return null') + } + await baseRequest(server, req, 
200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getTransactionByHash.spec.ts b/packages/client/test/rpc/eth/getTransactionByHash.spec.ts index 69608c3f93..557790cf75 100644 --- a/packages/client/test/rpc/eth/getTransactionByHash.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionByHash.spec.ts @@ -1,8 +1,8 @@ import { FeeMarketEIP1559Transaction, LegacyTransaction } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' -import * as pow from '../../testdata/geth-genesis/pow.json' +import pow from '../../testdata/geth-genesis/pow.json' import { baseRequest, dummy, @@ -14,83 +14,87 @@ import { const method = 'eth_getTransactionByHash' -tape(`${method}: call with legacy tx`, async (t) => { - const { chain, common, execution, server } = await setupChain(pow, 'pow', { txLookupLimit: 1 }) +describe(method, () => { + it('call with legacy tx', async () => { + const { chain, common, execution, server } = await setupChain(pow, 'pow', { txLookupLimit: 1 }) - // construct tx - const tx = LegacyTransaction.fromTxData( - { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000' }, - { common } - ).sign(dummy.privKey) + // construct tx + const tx = LegacyTransaction.fromTxData( + { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000' }, + { common } + ).sign(dummy.privKey) - await runBlockWithTxs(chain, execution, [tx]) + await runBlockWithTxs(chain, execution, [tx]) - // get the tx - let req = params(method, [bytesToHex(tx.hash())]) - let expectRes = (res: any) => { - const msg = 'should return the correct tx' - t.equal(res.body.result.hash, bytesToHex(tx.hash()), msg) - } - await baseRequest(t, server, req, 200, expectRes, false, false) + // get the tx + let req = params(method, [bytesToHex(tx.hash())]) + let expectRes = (res: any) => { + const msg = 'should return the correct tx' + 
assert.equal(res.body.result.hash, bytesToHex(tx.hash()), msg) + } + await baseRequest(server, req, 200, expectRes, false, false) - // run a block to ensure tx hash index is cleaned up when txLookupLimit=1 - await runBlockWithTxs(chain, execution, []) - req = params(method, [bytesToHex(tx.hash())]) - expectRes = (res: any) => { - const msg = 'should return null when past txLookupLimit' - t.equal(res.body.result, null, msg) - } - await baseRequest(t, server, req, 200, expectRes, true) // pass endOnFinish=true for last test -}) + // run a block to ensure tx hash index is cleaned up when txLookupLimit=1 + await runBlockWithTxs(chain, execution, []) + req = params(method, [bytesToHex(tx.hash())]) + expectRes = (res: any) => { + const msg = 'should return null when past txLookupLimit' + assert.equal(res.body.result, null, msg) + } + await baseRequest(server, req, 200, expectRes, true) // pass endOnFinish=true for last test + }) -tape(`${method}: call with 1559 tx`, async (t) => { - const { chain, common, execution, server } = await setupChain( - gethGenesisStartLondon(pow), - 'powLondon', - { txLookupLimit: 0 } - ) + it('call with 1559 tx', async () => { + const { chain, common, execution, server } = await setupChain( + gethGenesisStartLondon(pow), + 'powLondon', + { txLookupLimit: 0 } + ) - // construct tx - const tx = FeeMarketEIP1559Transaction.fromTxData( - { - gasLimit: 2000000, - maxFeePerGas: 975000000, - maxPriorityFeePerGas: 10, - to: '0x0000000000000000000000000000000000000000', - }, - { common } - ).sign(dummy.privKey) + // construct tx + const tx = FeeMarketEIP1559Transaction.fromTxData( + { + gasLimit: 2000000, + maxFeePerGas: 975000000, + maxPriorityFeePerGas: 10, + to: '0x0000000000000000000000000000000000000000', + }, + { common } + ).sign(dummy.privKey) - await runBlockWithTxs(chain, execution, [tx]) + await runBlockWithTxs(chain, execution, [tx]) - // get the tx - let req = params(method, [bytesToHex(tx.hash())]) - let expectRes = (res: any) => { - 
const msg = 'should return the correct tx type' - t.equal(res.body.result.type, '0x2', msg) - } - await baseRequest(t, server, req, 200, expectRes, false) + // get the tx + let req = params(method, [bytesToHex(tx.hash())]) + let expectRes = (res: any) => { + const msg = 'should return the correct tx type' + assert.equal(res.body.result.type, '0x2', msg) + } + await baseRequest(server, req, 200, expectRes, false) - // run some blocks to ensure tx hash index is not cleaned up when txLookupLimit=0 - await runBlockWithTxs(chain, execution, []) - await runBlockWithTxs(chain, execution, []) - await runBlockWithTxs(chain, execution, []) - req = params(method, [bytesToHex(tx.hash())]) - expectRes = (res: any) => { - const msg = 'should return the correct tx when txLookupLimit=0' - t.equal(res.body.result.hash, bytesToHex(tx.hash()), msg) - } - await baseRequest(t, server, req, 200, expectRes, true) // pass endOnFinish=true for last test -}) + // run some blocks to ensure tx hash index is not cleaned up when txLookupLimit=0 + await runBlockWithTxs(chain, execution, []) + await runBlockWithTxs(chain, execution, []) + await runBlockWithTxs(chain, execution, []) + req = params(method, [bytesToHex(tx.hash())]) + expectRes = (res: any) => { + const msg = 'should return the correct tx when txLookupLimit=0' + assert.equal(res.body.result.hash, bytesToHex(tx.hash()), msg) + } + await baseRequest(server, req, 200, expectRes, true) // pass endOnFinish=true for last test + }) -tape(`${method}: call with unknown tx hash`, async (t) => { - const { server } = await setupChain(pow, 'pow') + it('call with unknown tx hash', async () => { + const { server } = await setupChain(pow, 'pow') - // get a random tx hash - const req = params(method, ['0x89ea5b54111befb936851660a72b686a21bc2fc4889a9a308196ff99d08925a0']) - const expectRes = (res: any) => { - const msg = 'should return null' - t.equal(res.body.result, null, msg) - } - await baseRequest(t, server, req, 200, expectRes) + // get a random 
tx hash + const req = params(method, [ + '0x89ea5b54111befb936851660a72b686a21bc2fc4889a9a308196ff99d08925a0', + ]) + const expectRes = (res: any) => { + const msg = 'should return null' + assert.equal(res.body.result, null, msg) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getTransactionCount.spec.ts b/packages/client/test/rpc/eth/getTransactionCount.spec.ts index a28ef716e2..c59a092c99 100644 --- a/packages/client/test/rpc/eth/getTransactionCount.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionCount.spec.ts @@ -4,7 +4,7 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' import { LegacyTransaction } from '@ethereumjs/tx' import { Address } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -16,85 +16,90 @@ const method = 'eth_getTransactionCount' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) -tape(`${method}: call with valid arguments`, async (t) => { - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) - - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - - // since synchronizer.run() is not executed in the mock setup, - // manually run stateManager.generateCanonicalGenesis() - const genesisState = getGenesis(Chain.Mainnet) - await vm.stateManager.generateCanonicalGenesis(genesisState) - - // a genesis address - const address = 
Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') - - // verify nonce is 0 - let req = params(method, [address.toString(), 'latest']) - let expectRes = (res: any) => { - const msg = 'should return the correct nonce (0)' - t.equal(res.body.result, '0x0', msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) - tx.getSenderAddress = () => { - return address - } - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit: 2000000, - }, +describe(method, () => { + it( + 'call with valid arguments', + async () => { + const blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) + + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + + // since synchronizer.run() is not executed in the mock setup, + // manually run stateManager.generateCanonicalGenesis() + await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + + // a genesis address + const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + + // verify nonce is 0 + let req = params(method, [address.toString(), 'latest']) + let expectRes = (res: any) => { + const msg = 'should return the correct nonce (0)' + assert.equal(res.body.result, '0x0', msg) + } + await baseRequest(server, req, 200, expectRes, false) + + // construct block with tx + const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + tx.getSenderAddress = () => { + return address + } + 
const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( + { + header: { + parentHash: parent.hash(), + number: 1, + gasLimit: 2000000, + }, + }, + { common, calcDifficultyFromHeader: parent } + ) + block.transactions[0] = tx + + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await vm.blockchain.putBlock(ranBlock!) + + // verify nonce increments after a tx + req = params(method, [address.toString(), 'latest']) + expectRes = (res: any) => { + const msg = 'should return the correct nonce (1)' + assert.equal(res.body.result, '0x1', msg) + } + await baseRequest(server, req, 200, expectRes, false) + + // call with nonexistent account + req = params(method, [`0x${'11'.repeat(20)}`, 'latest']) + expectRes = (res: any) => { + const msg = 'should return 0x0 for nonexistent account' + assert.equal(res.body.result, `0x0`, msg) + } + await baseRequest(server, req, 200, expectRes) }, - { common, calcDifficultyFromHeader: parent } + { timeout: 30000 } ) - block.transactions[0] = tx - - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - await vm.blockchain.putBlock(ranBlock!) 
- - // verify nonce increments after a tx - req = params(method, [address.toString(), 'latest']) - expectRes = (res: any) => { - const msg = 'should return the correct nonce (1)' - t.equal(res.body.result, '0x1', msg) - } - await baseRequest(t, server, req, 200, expectRes, false) - - // call with nonexistent account - req = params(method, [`0x${'11'.repeat(20)}`, 'latest']) - expectRes = (res: any) => { - const msg = 'should return 0x0 for nonexistent account' - t.equal(res.body.result, `0x0`, msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) -tape(`${method}: call with unsupported block argument`, async (t) => { - const blockchain = await Blockchain.create() + it('call with unsupported block argument', async () => { + const blockchain = await Blockchain.create() - const client = createClient({ blockchain, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) + const client = createClient({ blockchain, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) - const req = params(method, ['0xccfd725760a68823ff1e062f4cc97e1360e8d997', 'pending']) - const expectRes = checkError(t, INVALID_PARAMS, '"pending" is not yet supported') - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, ['0xccfd725760a68823ff1e062f4cc97e1360e8d997', 'pending']) + const expectRes = checkError(INVALID_PARAMS, '"pending" is not yet supported') + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts b/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts index eeb65187f6..a7fb4dfaa1 100644 --- a/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts @@ -13,10 +13,9 @@ import { randomBytes, } from '@ethereumjs/util' import * as kzg from 'c-kzg' -import * as tape from 'tape' +import { assert, describe, it } 
from 'vitest' -import * as gethGenesis from '../../../../block/test/testdata/4844-hardfork.json' -import * as pow from '../../testdata/geth-genesis/pow.json' +import pow from '../../testdata/geth-genesis/pow.json' import { baseRequest, dummy, @@ -28,115 +27,120 @@ import { const method = 'eth_getTransactionReceipt' -tape(`${method}: call with legacy tx`, async (t) => { - const { chain, common, execution, server } = await setupChain(pow, 'pow') - - // construct tx - const tx = LegacyTransaction.fromTxData( - { - gasLimit: 2000000, - gasPrice: 100, - to: '0x0000000000000000000000000000000000000000', - }, - { common } - ).sign(dummy.privKey) - - await runBlockWithTxs(chain, execution, [tx]) - - // get the tx - const req = params(method, [bytesToHex(tx.hash())]) - const expectRes = (res: any) => { - const msg = 'should return the correct tx' - t.equal(res.body.result.transactionHash, bytesToHex(tx.hash()), msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) +describe(method, () => { + it('call with legacy tx', async () => { + const { chain, common, execution, server } = await setupChain(pow, 'pow') -tape(`${method}: call with 1559 tx`, async (t) => { - const { chain, common, execution, server } = await setupChain( - gethGenesisStartLondon(pow), - 'powLondon' - ) - - // construct tx - const tx = FeeMarketEIP1559Transaction.fromTxData( - { - gasLimit: 2000000, - maxFeePerGas: 975000000, - maxPriorityFeePerGas: 10, - to: '0x1230000000000000000000000000000000000321', - }, - { common } - ).sign(dummy.privKey) - - await runBlockWithTxs(chain, execution, [tx]) - - // get the tx - const req = params(method, [bytesToHex(tx.hash())]) - const expectRes = (res: any) => { - const msg = 'should return the correct tx' - t.equal(res.body.result.transactionHash, bytesToHex(tx.hash()), msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + // construct tx + const tx = LegacyTransaction.fromTxData( + { + gasLimit: 2000000, + gasPrice: 100, + to: 
'0x0000000000000000000000000000000000000000', + }, + { common } + ).sign(dummy.privKey) -tape(`${method}: call with unknown tx hash`, async (t) => { - const { server } = await setupChain(pow, 'pow') + await runBlockWithTxs(chain, execution, [tx]) - // get a random tx hash - const req = params(method, ['0x89ea5b54111befb936851660a72b686a21bc2fc4889a9a308196ff99d08925a0']) - const expectRes = (res: any) => { - const msg = 'should return null' - t.equal(res.body.result, null, msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + // get the tx + const req = params(method, [bytesToHex(tx.hash())]) + const expectRes = (res: any) => { + const msg = 'should return the correct tx' + assert.equal(res.body.result.transactionHash, bytesToHex(tx.hash()), msg) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: get dataGasUsed/dataGasPrice in blob tx receipt`, async (t) => { - const isBrowser = new Function('try {return this===window;}catch(e){ return false;}') - if (isBrowser() === true) { - t.end() - } else { - try { - // Verified KZG is loaded correctly -- NOOP if throws - initKZG(kzg, __dirname + '/../../../src/trustedSetups/devnet6.txt') - //eslint-disable-next-line - } catch {} - const common = Common.fromGethGenesis(gethGenesis, { - chain: 'customChain', - hardfork: Hardfork.Cancun, - }) - const { chain, execution, server } = await setupChain(gethGenesis, 'customChain') - common.setHardfork(Hardfork.Cancun) - - const blobs = getBlobs('hello world') - const commitments = blobsToCommitments(blobs) - const versionedHashes = commitmentsToVersionedHashes(commitments) - const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) - const tx = BlobEIP4844Transaction.fromTxData( + it('call with 1559 tx', async () => { + const { chain, common, execution, server } = await setupChain( + gethGenesisStartLondon(pow), + 'powLondon' + ) + + // construct tx + const tx = FeeMarketEIP1559Transaction.fromTxData( { - 
versionedHashes, - blobs, - kzgCommitments: commitments, - kzgProofs: proofs, - maxFeePerDataGas: 1000000n, - gasLimit: 0xffffn, - maxFeePerGas: 10000000n, - maxPriorityFeePerGas: 1000000n, - to: randomBytes(20), - nonce: 0n, + gasLimit: 2000000, + maxFeePerGas: 975000000, + maxPriorityFeePerGas: 10, + to: '0x1230000000000000000000000000000000000321', }, { common } ).sign(dummy.privKey) - await runBlockWithTxs(chain, execution, [tx], true) + await runBlockWithTxs(chain, execution, [tx]) + // get the tx const req = params(method, [bytesToHex(tx.hash())]) const expectRes = (res: any) => { - t.equal(res.body.result.dataGasUsed, '0x20000', 'receipt has correct data gas usage') - t.equal(res.body.result.dataGasPrice, '0x1', 'receipt has correct data gas price') + const msg = 'should return the correct tx' + assert.equal(res.body.result.transactionHash, bytesToHex(tx.hash()), msg) } + await baseRequest(server, req, 200, expectRes) + }) + + it('call with unknown tx hash', async () => { + const { server } = await setupChain(pow, 'pow') - await baseRequest(t, server, req, 200, expectRes) - } + // get a random tx hash + const req = params(method, [ + '0x89ea5b54111befb936851660a72b686a21bc2fc4889a9a308196ff99d08925a0', + ]) + const expectRes = (res: any) => { + const msg = 'should return null' + assert.equal(res.body.result, null, msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('get dataGasUsed/dataGasPrice in blob tx receipt', async () => { + const isBrowser = new Function('try {return this===window;}catch(e){ return false;}') + if (isBrowser() === true) { + assert.ok(true) + } else { + try { + // Verified KZG is loaded correctly -- NOOP if throws + initKZG(kzg, __dirname + '/../../../src/trustedSetups/devnet6.txt') + //eslint-disable-next-line + } catch {} + const gethGenesis = require('../../../../block/test/testdata/4844-hardfork.json') + const common = Common.fromGethGenesis(gethGenesis, { + chain: 'customChain', + hardfork: Hardfork.Cancun, + }) + 
const { chain, execution, server } = await setupChain(gethGenesis, 'customChain') + common.setHardfork(Hardfork.Cancun) + + const blobs = getBlobs('hello world') + const commitments = blobsToCommitments(blobs) + const versionedHashes = commitmentsToVersionedHashes(commitments) + const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) + const tx = BlobEIP4844Transaction.fromTxData( + { + versionedHashes, + blobs, + kzgCommitments: commitments, + kzgProofs: proofs, + maxFeePerDataGas: 1000000n, + gasLimit: 0xffffn, + maxFeePerGas: 10000000n, + maxPriorityFeePerGas: 1000000n, + to: randomBytes(20), + nonce: 0n, + }, + { common } + ).sign(dummy.privKey) + + await runBlockWithTxs(chain, execution, [tx], true) + + const req = params(method, [bytesToHex(tx.hash())]) + const expectRes = (res: any) => { + assert.equal(res.body.result.dataGasUsed, '0x20000', 'receipt has correct data gas usage') + assert.equal(res.body.result.dataGasPrice, '0x1', 'receipt has correct data gas price') + } + + await baseRequest(server, req, 200, expectRes) + } + }) }) diff --git a/packages/client/test/rpc/eth/getUncleCountByBlockNumber.spec.ts b/packages/client/test/rpc/eth/getUncleCountByBlockNumber.spec.ts index e8f62ce842..9223c9885d 100644 --- a/packages/client/test/rpc/eth/getUncleCountByBlockNumber.spec.ts +++ b/packages/client/test/rpc/eth/getUncleCountByBlockNumber.spec.ts @@ -1,4 +1,4 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -24,26 +24,28 @@ function createChain() { const method = 'eth_getUncleCountByBlockNumber' -tape(`${method}: call with valid arguments`, async (t) => { - const mockUncleCount = 3 +describe(method, () => { + it('call with valid arguments', async () => { + const mockUncleCount = 3 - const manager = createManager(createClient({ chain: createChain() 
})) - const server = startRPC(manager.getMethods()) + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) - const req = params(method, ['0x1']) - const expectRes = (res: any) => { - const msg = 'should return the correct number' - t.equal(res.body.result, mockUncleCount, msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, ['0x1']) + const expectRes = (res: any) => { + const msg = 'should return the correct number' + assert.equal(res.body.result, mockUncleCount, msg) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with invalid block number`, async (t) => { - const manager = createManager(createClient({ chain: createChain() })) - const server = startRPC(manager.getMethods()) + it('call with invalid block number', async () => { + const manager = createManager(createClient({ chain: createChain() })) + const server = startRPC(manager.getMethods()) - const req = params(method, ['0x5a']) + const req = params(method, ['0x5a']) - const expectRes = checkError(t, INVALID_PARAMS, 'specified block greater than current height') - await baseRequest(t, server, req, 200, expectRes) + const expectRes = checkError(INVALID_PARAMS, 'specified block greater than current height') + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/protocolVersion.spec.ts b/packages/client/test/rpc/eth/protocolVersion.spec.ts index 842566ba09..377b73b32b 100644 --- a/packages/client/test/rpc/eth/protocolVersion.spec.ts +++ b/packages/client/test/rpc/eth/protocolVersion.spec.ts @@ -1,17 +1,19 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, baseSetup, params } from '../helpers' const method = 'eth_protocolVersion' -tape(`${method}: call`, async (t) => { - const { server } = baseSetup() +describe(method, () => { + it('call', async () => { + const { server } = baseSetup() 
- const req = params(method, []) - const expectRes = (res: any) => { - const responseBlob = res.body - const msg = 'protocol version should be a string' - t.equal(typeof responseBlob.result, 'string', msg) - } - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, []) + const expectRes = (res: any) => { + const responseBlob = res.body + const msg = 'protocol version should be a string' + assert.equal(typeof responseBlob.result, 'string', msg) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/eth/sendRawTransaction.spec.ts b/packages/client/test/rpc/eth/sendRawTransaction.spec.ts index 7bc7780174..5ace09d87d 100644 --- a/packages/client/test/rpc/eth/sendRawTransaction.spec.ts +++ b/packages/client/test/rpc/eth/sendRawTransaction.spec.ts @@ -17,7 +17,7 @@ import { randomBytes, } from '@ethereumjs/util' import * as kzg from 'c-kzg' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INTERNAL_ERROR, INVALID_PARAMS, PARSE_ERROR } from '../../../src/rpc/error-code' import { baseRequest, baseSetup, params } from '../helpers' @@ -27,271 +27,271 @@ import type { FullEthereumService } from '../../../src/service' const method = 'eth_sendRawTransaction' -tape(`${method}: call with valid arguments`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.setStateRoot = function (): any {} - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - const common = new Common({ chain: Chain.Mainnet }) - common - .hardforks() - .filter((hf) => hf.timestamp !== undefined) - .map((hf) => { - hf.timestamp = undefined - }) - const syncTargetHeight = common.hardforkBlock(Hardfork.London) - const { server, client } = baseSetup({ 
syncTargetHeight, includeVM: true }) - - // Mainnet EIP-1559 tx - const txData = - '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const transaction = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData)) - const address = transaction.getSenderAddress() - const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm - - await vm.stateManager.putAccount(address, new Account()) - const account = await vm.stateManager.getAccount(address) - account!.balance = BigInt('40100000') - await vm.stateManager.putAccount(address, account!) 
- - const req = params(method, [txData]) - const expectRes = (res: any) => { - const msg = 'should return the correct tx hash' - t.equal( - res.body.result, - '0xd7217a7d3251880051783f305a3536e368c604aa1f1602e6cd107eb7b87129da', - msg - ) - } - await baseRequest(t, server, req, 200, expectRes) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy -}) +describe(method, () => { + it('call with valid arguments', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.setStateRoot = function (): any {} + DefaultStateManager.prototype.shallowCopy = function () { + return this + } + const common = new Common({ chain: Chain.Mainnet }) + common + .hardforks() + .filter((hf) => hf.timestamp !== undefined) + .map((hf) => { + hf.timestamp = undefined + }) + const syncTargetHeight = common.hardforkBlock(Hardfork.London) + const { server, client } = baseSetup({ syncTargetHeight, includeVM: true }) + + // Mainnet EIP-1559 tx + const txData = + '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' + const transaction = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData)) + const address = transaction.getSenderAddress() + const vm = (client.services.find((s) => 
s.name === 'eth') as FullEthereumService).execution.vm + + await vm.stateManager.putAccount(address, new Account()) + const account = await vm.stateManager.getAccount(address) + account!.balance = BigInt('40100000') + await vm.stateManager.putAccount(address, account!) + + const req = params(method, [txData]) + const expectRes = (res: any) => { + const msg = 'should return the correct tx hash' + assert.equal( + res.body.result, + '0xd7217a7d3251880051783f305a3536e368c604aa1f1602e6cd107eb7b87129da', + msg + ) + } + await baseRequest(server, req, 200, expectRes) + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + }) -tape(`${method}: send local tx with gasprice lower than minimum`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - DefaultStateManager.prototype.setStateRoot = (): any => {} - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) - const { server } = baseSetup({ syncTargetHeight, includeVM: true }) - - const transaction = LegacyTransaction.fromTxData({ - gasLimit: 21000, - gasPrice: 0, - nonce: 0, - }).sign(hexToBytes('0x' + '42'.repeat(32))) - - const txData = bytesToHex(transaction.serialize()) - - const req = params(method, [txData]) - const expectRes = (res: any) => { - t.equal( - res.body.result, - '0xf6798d5ed936a464ef4f49dd5a3abe1ad6947364912bd47c5e56781125d44ac3', - 'local tx with lower gasprice than minimum gasprice added to pool' - ) - } - await baseRequest(t, server, req, 200, expectRes) + it('send local tx with gasprice lower than minimum', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + DefaultStateManager.prototype.setStateRoot = (): any => {} 
+ const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) + const { server } = baseSetup({ syncTargetHeight, includeVM: true }) + + const transaction = LegacyTransaction.fromTxData({ + gasLimit: 21000, + gasPrice: 0, + nonce: 0, + }).sign(hexToBytes('0x' + '42'.repeat(32))) + + const txData = bytesToHex(transaction.serialize()) + + const req = params(method, [txData]) + const expectRes = (res: any) => { + assert.equal( + res.body.result, + '0xf6798d5ed936a464ef4f49dd5a3abe1ad6947364912bd47c5e56781125d44ac3', + 'local tx with lower gasprice than minimum gasprice added to pool' + ) + } + await baseRequest(server, req, 200, expectRes) + + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + }) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot -}) + it('call with invalid arguments: not enough balance', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + DefaultStateManager.prototype.setStateRoot = (): any => {} + const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) + const { server } = baseSetup({ syncTargetHeight, includeVM: true }) -tape(`${method}: call with invalid arguments: not enough balance`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - DefaultStateManager.prototype.setStateRoot = (): any => {} - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) - const { server } = baseSetup({ syncTargetHeight, includeVM: true }) + // Mainnet EIP-1559 tx + const txData = + 
'0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - // Mainnet EIP-1559 tx - const txData = - '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' + const req = params(method, [txData]) + const expectRes = checkError(INVALID_PARAMS, 'insufficient balance') + await baseRequest(server, req, 200, expectRes) - const req = params(method, [txData]) - const expectRes = checkError(t, INVALID_PARAMS, 'insufficient balance') - await baseRequest(t, server, req, 200, expectRes) + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + }) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot -}) + it('call with sync target height not set yet', async () => { + const { server, client } = baseSetup() + client.config.synchronized = false -tape(`${method}: call with sync target height not set yet`, async (t) => { - const { server, client } = baseSetup() - client.config.synchronized = false - - // Mainnet EIP-1559 tx - const txData = - 
'0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const req = params(method, [txData]) - - const expectRes = checkError( - t, - INTERNAL_ERROR, - 'client is not aware of the current chain height yet (give sync some more time)' - ) - await baseRequest(t, server, req, 200, expectRes) -}) + // Mainnet EIP-1559 tx + const txData = + '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' + const req = params(method, [txData]) -tape(`${method}: call with invalid tx (wrong chain ID)`, async (t) => { - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) - const { server } = baseSetup({ syncTargetHeight, includeVM: true }) + const expectRes = checkError( + INTERNAL_ERROR, + 'client is not aware of the current chain height yet (give sync some more time)' + ) + await baseRequest(server, req, 200, expectRes) + }) - // Baikal EIP-1559 tx - const txData = - 
'0x02f9010a82066a8001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const req = params(method, [txData]) + it('call with invalid tx (wrong chain ID)', async () => { + const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) + const { server } = baseSetup({ syncTargetHeight, includeVM: true }) - const expectRes = checkError(t, PARSE_ERROR, 'serialized tx data could not be parsed') - await baseRequest(t, server, req, 200, expectRes) -}) + // Baikal EIP-1559 tx + const txData = + '0x02f9010a82066a8001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' + const req = params(method, [txData]) -tape(`${method}: call with unsigned tx`, async (t) => { - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) - const { server } = baseSetup({ syncTargetHeight }) - - // Mainnet EIP-1559 tx - const txData = - 
'0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const tx = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData), { - common, - freeze: false, + const expectRes = checkError(PARSE_ERROR, 'serialized tx data could not be parsed') + await baseRequest(server, req, 200, expectRes) }) - ;(tx as any).v = undefined - ;(tx as any).r = undefined - ;(tx as any).s = undefined - const txHex = bytesToHex(tx.serialize()) - const req = params(method, [txHex]) - - const expectRes = checkError(t, INVALID_PARAMS, 'tx needs to be signed') - await baseRequest(t, server, req, 200, expectRes) -}) -tape(`${method}: call with no peers`, async (t) => { - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - DefaultStateManager.prototype.setStateRoot = (): any => {} - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - - const syncTargetHeight = common.hardforkBlock(Hardfork.London) - const { server, client } = baseSetup({ - commonChain: common, - syncTargetHeight, - includeVM: true, - noPeers: true, + it('call with unsigned tx', async () => { + const syncTargetHeight = new Common({ chain: Chain.Mainnet 
}).hardforkBlock(Hardfork.London) + const { server } = baseSetup({ syncTargetHeight }) + + // Mainnet EIP-1559 tx + const txData = + '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' + const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const tx = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData), { + common, + freeze: false, + }) + ;(tx as any).v = undefined + ;(tx as any).r = undefined + ;(tx as any).s = undefined + const txHex = bytesToHex(tx.serialize()) + const req = params(method, [txHex]) + + const expectRes = checkError(INVALID_PARAMS, 'tx needs to be signed') + await baseRequest(server, req, 200, expectRes) }) - // Mainnet EIP-1559 tx - const txData = - '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const transaction = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData)) - const address = transaction.getSenderAddress() - const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm + it('call with no 
peers', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + DefaultStateManager.prototype.setStateRoot = (): any => {} + const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.shallowCopy = function () { + return this + } + const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + + const syncTargetHeight = common.hardforkBlock(Hardfork.London) + const { server, client } = baseSetup({ + commonChain: common, + syncTargetHeight, + includeVM: true, + noPeers: true, + }) - await vm.stateManager.putAccount(address, new Account()) - const account = await vm.stateManager.getAccount(address) - account!.balance = BigInt('40100000') - await vm.stateManager.putAccount(address, account!) + // Mainnet EIP-1559 tx + const txData = + '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' + const transaction = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData)) + const address = transaction.getSenderAddress() + const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm - const req = params(method, [txData]) + await vm.stateManager.putAccount(address, new Account()) + const account = await vm.stateManager.getAccount(address) + account!.balance = BigInt('40100000') + await vm.stateManager.putAccount(address, account!) 
- const expectRes = checkError(t, INTERNAL_ERROR, 'no peer connection available') - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, [txData]) - // Restore setStateRoot - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy -}) + const expectRes = checkError(INTERNAL_ERROR, 'no peer connection available') + await baseRequest(server, req, 200, expectRes) -tape('blob EIP 4844 transaction', async (t) => { - t.plan(2) - // Disable stateroot validation in TxPool since valid state root isn't available - const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot - DefaultStateManager.prototype.setStateRoot = (): any => {} - const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy - DefaultStateManager.prototype.shallowCopy = function () { - return this - } - // Disable block header consensus format validation - const consensusFormatValidation = (BlockHeader as any).prototype._consensusFormatValidation - ;(BlockHeader as any).prototype._consensusFormatValidation = (): any => {} - try { - initKZG(kzg, __dirname + '/../../../src/trustedSetups/devnet6.txt') - // eslint-disable-next-line - } catch {} - const gethGenesis = require('../../../../block/test/testdata/4844-hardfork.json') - const common = Common.fromGethGenesis(gethGenesis, { - chain: 'customChain', - hardfork: Hardfork.Cancun, + // Restore setStateRoot + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy }) - common.setHardfork(Hardfork.Cancun) - const { server, client } = baseSetup({ - commonChain: common, - includeVM: true, - syncTargetHeight: 100n, + + it('blob EIP 4844 transaction', async () => { + // Disable stateroot validation in TxPool since valid state root isn't available + const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot + 
DefaultStateManager.prototype.setStateRoot = (): any => {} + const originalStateManagerCopy = DefaultStateManager.prototype.shallowCopy + DefaultStateManager.prototype.shallowCopy = function () { + return this + } + // Disable block header consensus format validation + const consensusFormatValidation = BlockHeader.prototype['_consensusFormatValidation'] + BlockHeader.prototype['_consensusFormatValidation'] = (): any => {} + try { + initKZG(kzg, __dirname + '/../../../src/trustedSetups/devnet6.txt') + // eslint-disable-next-line + } catch {} + const gethGenesis = require('../../../../block/test/testdata/4844-hardfork.json') + const common = Common.fromGethGenesis(gethGenesis, { + chain: 'customChain', + hardfork: Hardfork.Cancun, + }) + common.setHardfork(Hardfork.Cancun) + const { server, client } = baseSetup({ + commonChain: common, + includeVM: true, + syncTargetHeight: 100n, + }) + const blobs = getBlobs('hello world') + const commitments = blobsToCommitments(blobs) + const versionedHashes = commitmentsToVersionedHashes(commitments) + const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) + const pk = randomBytes(32) + const tx = BlobEIP4844Transaction.fromTxData( + { + versionedHashes, + blobs, + kzgCommitments: commitments, + kzgProofs: proofs, + maxFeePerDataGas: 1000000n, + gasLimit: 0xffffn, + maxFeePerGas: 10000000n, + maxPriorityFeePerGas: 1000000n, + to: randomBytes(20), + }, + { common } + ).sign(pk) + + const replacementTx = BlobEIP4844Transaction.fromTxData( + { + versionedHashes, + blobs, + kzgCommitments: commitments, + kzgProofs: proofs, + maxFeePerDataGas: 1000000n, + gasLimit: 0xfffffn, + maxFeePerGas: 100000000n, + maxPriorityFeePerGas: 10000000n, + to: randomBytes(20), + }, + { common } + ).sign(pk) + const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm + await vm.stateManager.putAccount(tx.getSenderAddress(), new Account()) + const account = await 
vm.stateManager.getAccount(tx.getSenderAddress()) + account!.balance = BigInt(0xfffffffffffff) + await vm.stateManager.putAccount(tx.getSenderAddress(), account!) + + const req = params(method, [bytesToHex(tx.serializeNetworkWrapper())]) + const req2 = params(method, [bytesToHex(replacementTx.serializeNetworkWrapper())]) + const expectRes = (res: any) => { + assert.equal(res.body.error, undefined, 'initial blob transaction accepted') + } + + const expectRes2 = checkError(INVALID_PARAMS, 'replacement data gas too low') + + await baseRequest(server, req, 200, expectRes, false) + await baseRequest(server, req2, 200, expectRes2) + + // Restore stubbed out functionality + DefaultStateManager.prototype.setStateRoot = originalSetStateRoot + DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy + BlockHeader.prototype['_consensusFormatValidation'] = consensusFormatValidation }) - const blobs = getBlobs('hello world') - const commitments = blobsToCommitments(blobs) - const versionedHashes = commitmentsToVersionedHashes(commitments) - const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) - const pk = randomBytes(32) - const tx = BlobEIP4844Transaction.fromTxData( - { - versionedHashes, - blobs, - kzgCommitments: commitments, - kzgProofs: proofs, - maxFeePerDataGas: 1000000n, - gasLimit: 0xffffn, - maxFeePerGas: 10000000n, - maxPriorityFeePerGas: 1000000n, - to: randomBytes(20), - }, - { common } - ).sign(pk) - - const replacementTx = BlobEIP4844Transaction.fromTxData( - { - versionedHashes, - blobs, - kzgCommitments: commitments, - kzgProofs: proofs, - maxFeePerDataGas: 1000000n, - gasLimit: 0xfffffn, - maxFeePerGas: 100000000n, - maxPriorityFeePerGas: 10000000n, - to: randomBytes(20), - }, - { common } - ).sign(pk) - const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm - await vm.stateManager.putAccount(tx.getSenderAddress(), new Account()) - const account = await 
vm.stateManager.getAccount(tx.getSenderAddress()) - account!.balance = BigInt(0xfffffffffffff) - await vm.stateManager.putAccount(tx.getSenderAddress(), account!) - - const req = params(method, [bytesToHex(tx.serializeNetworkWrapper())]) - const req2 = params(method, [bytesToHex(replacementTx.serializeNetworkWrapper())]) - const expectRes = (res: any) => { - t.equal(res.body.error, undefined, 'initial blob transaction accepted') - } - - const expectRes2 = checkError(t, INVALID_PARAMS, 'replacement data gas too low') - - await baseRequest(t, server, req, 200, expectRes, false) - await baseRequest(t, server, req2, 200, expectRes2) - - // Restore stubbed out functionality - DefaultStateManager.prototype.setStateRoot = originalSetStateRoot - DefaultStateManager.prototype.shallowCopy = originalStateManagerCopy - ;(BlockHeader as any).prototype._consensusFormatValidation = consensusFormatValidation }) diff --git a/packages/client/test/rpc/eth/syncing.spec.ts b/packages/client/test/rpc/eth/syncing.spec.ts index fe8d737859..46ce00d71a 100644 --- a/packages/client/test/rpc/eth/syncing.spec.ts +++ b/packages/client/test/rpc/eth/syncing.spec.ts @@ -1,5 +1,5 @@ -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { INTERNAL_ERROR } from '../../../src/rpc/error-code' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -9,88 +9,89 @@ import type { FullSynchronizer } from '../../../src/sync' const method = 'eth_syncing' -tape(`${method}: should return false when the client is synchronized`, async (t) => { - const client = createClient() - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - - client.config.synchronized = false - t.equals(client.config.synchronized, false, 'not synchronized yet') - client.config.synchronized = true - t.equals(client.config.synchronized, true, 'synchronized') - - const req = params(method, []) - const expectRes = 
(res: any) => { - const msg = 'should return false' - t.equal(res.body.result, false, msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) - -tape(`${method}: should return no peer available error`, async (t) => { - const client = createClient({ noPeers: true }) - const manager = createManager(client) - const rpcServer = startRPC(manager.getMethods()) - - client.config.synchronized = false - t.equals(client.config.synchronized, false, 'not synchronized yet') - - const req = params(method, []) - - const expectRes = checkError(t, INTERNAL_ERROR, 'no peer available for synchronization') - await baseRequest(t, rpcServer, req, 200, expectRes) -}) - -tape(`${method}: should return highest block header unavailable error`, async (t) => { - const client = createClient() - const manager = createManager(client) - const rpcServer = startRPC(manager.getMethods()) - - const synchronizer = client.services[0].synchronizer! - synchronizer.best = td.func() - td.when(synchronizer.best()).thenResolve('peer') - - client.config.synchronized = false - t.equals(client.config.synchronized, false, 'not synchronized yet') - - const req = params(method, []) - - const expectRes = checkError(t, INTERNAL_ERROR, 'highest block header unavailable') - await baseRequest(t, rpcServer, req, 200, expectRes) -}) - -tape(`${method}: should return syncing status object when unsynced`, async (t) => { - const client = createClient() - const manager = createManager(client) - const rpcServer = startRPC(manager.getMethods()) - - const synchronizer = client.services[0].synchronizer as FullSynchronizer - synchronizer.best = td.func() - synchronizer.latest = td.func() - td.when(synchronizer.best()).thenResolve('peer') - td.when(synchronizer.latest('peer' as any)).thenResolve({ number: BigInt(2) }) - - client.config.synchronized = false - t.equals(client.config.synchronized, false, 'not synchronized yet') - - const req = params(method, []) - const expectRes = (res: any) => { - const msg = 'should 
return syncing status object' - if ( - res.body.result.startingBlock === '0x0' && - res.body.result.currentBlock === '0x0' && - res.body.result.highestBlock === '0x2' - ) { - t.pass(msg) - } else { - t.fail(msg) +describe(method, () => { + it('should return false when the client is synchronized', async () => { + const client = createClient() + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + + client.config.synchronized = false + assert.equal(client.config.synchronized, false, 'not synchronized yet') + client.config.synchronized = true + assert.equal(client.config.synchronized, true, 'synchronized') + + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return false' + assert.equal(res.body.result, false, msg) + } + await baseRequest(server, req, 200, expectRes) + }) + + it('should return no peer available error', async () => { + const client = createClient({ noPeers: true }) + const manager = createManager(client) + const rpcServer = startRPC(manager.getMethods()) + + client.config.synchronized = false + assert.equal(client.config.synchronized, false, 'not synchronized yet') + + const req = params(method, []) + + const expectRes = checkError(INTERNAL_ERROR, 'no peer available for synchronization') + await baseRequest(rpcServer, req, 200, expectRes) + }) + + it('should return highest block header unavailable error', async () => { + const client = createClient() + const manager = createManager(client) + const rpcServer = startRPC(manager.getMethods()) + + const synchronizer = client.services[0].synchronizer! 
+ synchronizer.best = td.func() + td.when(synchronizer.best()).thenResolve('peer') + + client.config.synchronized = false + assert.equal(client.config.synchronized, false, 'not synchronized yet') + + const req = params(method, []) + + const expectRes = checkError(INTERNAL_ERROR, 'highest block header unavailable') + await baseRequest(rpcServer, req, 200, expectRes) + }) + + it('should return syncing status object when unsynced', async () => { + const client = createClient() + const manager = createManager(client) + const rpcServer = startRPC(manager.getMethods()) + + const synchronizer = client.services[0].synchronizer as FullSynchronizer + synchronizer.best = td.func() + synchronizer.latest = td.func() + td.when(synchronizer.best()).thenResolve('peer') + td.when(synchronizer.latest('peer' as any)).thenResolve({ number: BigInt(2) }) + + client.config.synchronized = false + assert.equal(client.config.synchronized, false, 'not synchronized yet') + + const req = params(method, []) + const expectRes = (res: any) => { + const msg = 'should return syncing status object' + if ( + res.body.result.startingBlock === '0x0' && + res.body.result.currentBlock === '0x0' && + res.body.result.highestBlock === '0x2' + ) { + assert.ok(true, msg) + } else { + assert.fail(msg) + } } - } - await baseRequest(t, rpcServer, req, 200, expectRes) -}) + await baseRequest(rpcServer, req, 200, expectRes) + }) -tape('should reset td', (t) => { - td.reset() - t.end() + it('should reset td', () => { + td.reset() + }) }) diff --git a/packages/client/test/rpc/helpers.ts b/packages/client/test/rpc/helpers.ts index e5ef810d79..157964e82f 100644 --- a/packages/client/test/rpc/helpers.ts +++ b/packages/client/test/rpc/helpers.ts @@ -5,6 +5,7 @@ import { getGenesis } from '@ethereumjs/genesis' import { Address, KECCAK256_RLP, hexToBytes, parseGethGenesisState } from '@ethereumjs/util' import { Server as RPCServer } from 'jayson/promise' import { MemoryLevel } from 'memory-level' +import { assert } from 
'vitest' import { Chain } from '../../src/blockchain/chain' import { Config } from '../../src/config' @@ -24,7 +25,6 @@ import type { TypedTransaction } from '@ethereumjs/tx' import type { GenesisState } from '@ethereumjs/util' import type { IncomingMessage } from 'connect' import type { HttpServer } from 'jayson/promise' -import type * as tape from 'tape' const request = require('supertest') @@ -48,7 +48,7 @@ type createClientArgs = { } export function startRPC( methods: any, - opts: StartRPCOpts = { port: 3001 }, + opts: StartRPCOpts = { port: 0 }, withEngineMiddleware?: WithEngineMiddleware ) { const { port, wsServer } = opts @@ -193,7 +193,6 @@ export function params(method: string, params: Array = []) { } export async function baseRequest( - t: tape.Test, server: HttpServer, req: Object, expect: number, @@ -212,11 +211,11 @@ export async function baseRequest( closeRPC(server) } if (endOnFinish) { - t.end() + assert.ok(true) } } catch (err) { closeRPC(server) - t.end(err) + assert.notOk(err) } } diff --git a/packages/client/test/rpc/net/listening.spec.ts b/packages/client/test/rpc/net/listening.spec.ts index 514049f185..f7ac3421f3 100644 --- a/packages/client/test/rpc/net/listening.spec.ts +++ b/packages/client/test/rpc/net/listening.spec.ts @@ -1,35 +1,37 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' const method = 'net_listening' -tape(`${method}: call while listening`, async (t) => { - const manager = createManager(createClient({ opened: true })) - const server = startRPC(manager.getMethods()) +describe(method, () => { + it('call while listening', async () => { + const manager = createManager(createClient({ opened: true })) + const server = startRPC(manager.getMethods()) - const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - let msg = 'result should be a boolean' - t.equal(typeof result, 
'boolean', msg) - msg = 'should be listening' - t.equal(result, true, msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, []) + const expectRes = (res: any) => { + const { result } = res.body + let msg = 'result should be a boolean' + assert.equal(typeof result, 'boolean', msg) + msg = 'should be listening' + assert.equal(result, true, msg) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call while not listening`, async (t) => { - const manager = createManager(createClient({ opened: false })) - const server = startRPC(manager.getMethods()) + it('call while not listening', async () => { + const manager = createManager(createClient({ opened: false })) + const server = startRPC(manager.getMethods()) - const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - let msg = 'result should be a boolean' - t.equal(typeof result, 'boolean', msg) - msg = 'should not be listening' - t.equal(result, false, msg) - } - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, []) + const expectRes = (res: any) => { + const { result } = res.body + let msg = 'result should be a boolean' + assert.equal(typeof result, 'boolean', msg) + msg = 'should not be listening' + assert.equal(result, false, msg) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/net/peerCount.spec.ts b/packages/client/test/rpc/net/peerCount.spec.ts index f58468480f..e04d9312ec 100644 --- a/packages/client/test/rpc/net/peerCount.spec.ts +++ b/packages/client/test/rpc/net/peerCount.spec.ts @@ -1,18 +1,20 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' const method = 'net_peerCount' -tape(`${method}: call`, async (t) => { - const manager = createManager(createClient({ opened: true })) - const server = 
startRPC(manager.getMethods()) +describe(method, () => { + it('call', async () => { + const manager = createManager(createClient({ opened: true })) + const server = startRPC(manager.getMethods()) - const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - const msg = 'result should be a hex number' - t.equal(result.substring(0, 2), '0x', msg) - } - await baseRequest(t, server, req, 200, expectRes) + const req = params(method, []) + const expectRes = (res: any) => { + const { result } = res.body + const msg = 'result should be a hex number' + assert.equal(result.substring(0, 2), '0x', msg) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/net/version.spec.ts b/packages/client/test/rpc/net/version.spec.ts index 6ae2cdd633..4dad1f989c 100644 --- a/packages/client/test/rpc/net/version.spec.ts +++ b/packages/client/test/rpc/net/version.spec.ts @@ -1,7 +1,7 @@ import { BlockHeader } from '@ethereumjs/block' import { Chain, Common } from '@ethereumjs/common' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { baseRequest, baseSetup, createClient, createManager, params, startRPC } from '../helpers' @@ -9,56 +9,60 @@ const method = 'net_version' const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation -function compareResult(t: any, result: any, chainId: any) { +function compareResult(result: any, chainId: any) { let msg = 'result should be a string' - t.equal(typeof result, 'string', msg) + assert.equal(typeof result, 'string', msg) msg = 'result string should not be empty' - t.notEqual(result.length, 0, msg) + assert.notEqual(result.length, 0, msg) msg = `should be the correct chain ID (expected: ${chainId}, received: ${result})` - t.equal(result, chainId, msg) + assert.equal(result, chainId, msg) } -tape(`${method}: call on mainnnet`, async (t) => { - const manager = createManager( - 
createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) - ) - const server = startRPC(manager.getMethods()) +describe(method, () => { + it('call on mainnet', async () => { + const { server } = baseSetup() - const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - compareResult(t, result, '5') - } - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, []) + const expectRes = (res: any) => { + const { result } = res.body + compareResult(result, '1') + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call on mainnet`, async (t) => { - const { server } = baseSetup() + it('call on sepolia', async () => { + // Stub out block consensusFormatValidation checks + BlockHeader.prototype['_consensusFormatValidation'] = td.func() + const manager = createManager( + createClient({ opened: true, commonChain: new Common({ chain: Chain.Sepolia }) }) + ) + const server = startRPC(manager.getMethods()) - const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - compareResult(t, result, '1') - } - await baseRequest(t, server, req, 200, expectRes) -}) + const req = params(method, []) + const expectRes = (res: any) => { + const { result } = res.body + compareResult(result, '11155111') + } + await baseRequest(server, req, 200, expectRes) + td.reset() + }) -tape(`${method}: call on goerli`, async (t) => { - const manager = createManager( - createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) - ) - const server = startRPC(manager.getMethods()) + it('call on goerli', async () => { + const manager = createManager( + createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) + ) + const server = startRPC(manager.getMethods()) - const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - compareResult(t, result, '5') - } - await baseRequest(t, 
server, req, 200, expectRes) -}) + const req = params(method, []) + const expectRes = (res: any) => { + const { result } = res.body + compareResult(result, '5') + } + await baseRequest(server, req, 200, expectRes) + }) -tape('reset TD', (t) => { - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate - td.reset() - t.end() + it('reset TD', () => { + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate + td.reset() + }) }) diff --git a/packages/client/test/rpc/rpc.spec.ts b/packages/client/test/rpc/rpc.spec.ts index e399d7924b..91275fcabb 100644 --- a/packages/client/test/rpc/rpc.spec.ts +++ b/packages/client/test/rpc/rpc.spec.ts @@ -1,6 +1,6 @@ import { randomBytes } from '@ethereumjs/util' import { encode } from 'jwt-simple' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { METHOD_NOT_FOUND } from '../../src/rpc/error-code' @@ -12,212 +12,214 @@ const request = require('supertest') const jwtSecret = randomBytes(32) -tape('call JSON-RPC without Content-Type header', (t) => { - const server = startRPC({}) - const req = 'plaintext' - - request(server) - .post('/') - .send(req) - .expect(415) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) - -tape('call JSON-RPC auth protected server without any auth headers', (t) => { - const server = startRPC({}, undefined, { jwtSecret }) - const req = 'plaintext' - - request(server) - .post('/') - .send(req) - .expect(401) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) +describe('JSON-RPC call', () => { + it('without Content-Type header', () => { + const server = startRPC({}) + const req = 'plaintext' + + request(server) + .post('/') + .send(req) + .expect(415) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) -tape('call JSON-RPC auth protected server with invalid token', (t) => { - const server = startRPC({}, undefined, { jwtSecret }) - const req = 'plaintext' - - request(server) - .post('/') - 
.set('Authorization', 'Bearer invalidtoken') - .send(req) - .expect(401) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) + it('auth protected server without any auth headers', () => { + const server = startRPC({}, undefined, { jwtSecret }) + const req = 'plaintext' + + request(server) + .post('/') + .send(req) + .expect(401) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) -tape('call JSON-RPC auth protected server with an invalid algorithm token', (t) => { - const server = startRPC({}, undefined, { jwtSecret }) - const req = 'plaintext' - const claims = { iat: Math.floor(new Date().getTime() / 1000) } - const token = encode(claims, jwtSecret as never as string, 'HS512' as TAlgorithm) - - request(server) - .post('/') - .set('Authorization', `Bearer ${token}`) - .send(req) - .expect(401) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) + it('auth protected server with invalid token', () => { + const server = startRPC({}, undefined, { jwtSecret }) + const req = 'plaintext' + + request(server) + .post('/') + .set('Authorization', 'Bearer invalidtoken') + .send(req) + .expect(401) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) -tape('call JSON-RPC auth protected server with a valid token', (t) => { - const server = startRPC({}, undefined, { jwtSecret }) - const req = 'plaintext' - const claims = { iat: Math.floor(new Date().getTime() / 1000) } - const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) - - request(server) - .post('/') - .set('Authorization', `Bearer ${token}`) - .send(req) - .expect(415) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) + it('auth protected server with an invalid algorithm token', () => { + const server = startRPC({}, undefined, { jwtSecret }) + const req = 'plaintext' + const claims = { iat: Math.floor(new Date().getTime() / 1000) } + const token = encode(claims, jwtSecret as never as string, 'HS512' as TAlgorithm) + + 
request(server) + .post('/') + .set('Authorization', `Bearer ${token}`) + .send(req) + .expect(401) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) -tape('call JSON-RPC auth protected server with a valid but stale token', (t) => { - const server = startRPC({}, undefined, { jwtSecret }) - const req = 'plaintext' - const claims = { iat: Math.floor(new Date().getTime() / 1000 - 61) } - const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) - - request(server) - .post('/') - .set('Authorization', `Bearer ${token}`) - .send(req) - .expect(401) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) + it('auth protected server with a valid token', () => { + const server = startRPC({}, undefined, { jwtSecret }) + const req = 'plaintext' + const claims = { iat: Math.floor(new Date().getTime() / 1000) } + const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) + + request(server) + .post('/') + .set('Authorization', `Bearer ${token}`) + .send(req) + .expect(415) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) -tape('call JSON-RPC without Content-Type header', (t) => { - const server = startRPC({}, undefined, { jwtSecret }) - const req = 'plaintext' - - request(server) - .post('/') - .send(req) - .expect(401) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) + it('auth protected server with a valid but stale token', () => { + const server = startRPC({}, undefined, { jwtSecret }) + const req = 'plaintext' + const claims = { iat: Math.floor(new Date().getTime() / 1000 - 61) } + const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) + + request(server) + .post('/') + .set('Authorization', `Bearer ${token}`) + .send(req) + .expect(401) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) -tape('call JSON RPC with nonexistent method', (t) => { - const server = startRPC({}) - const req = { - jsonrpc: '2.0', - 
method: 'METHOD_DOES_NOT_EXIST', - params: ['0x1', true], - id: 1, - } - - request(server) - .post('/') - .set('Content-Type', 'application/json') - .send(req) - .expect((res: any) => { - if (res.body.error === undefined) { - throw new Error('should return an error object') - } - if (res.body.error.code !== METHOD_NOT_FOUND) { - throw new Error(`should have an error code ${METHOD_NOT_FOUND}`) - } - }) - .end((err: any) => { - closeRPC(server) - t.end(err) - }) -}) + it('without Content-Type header', () => { + const server = startRPC({}, undefined, { jwtSecret }) + const req = 'plaintext' + + request(server) + .post('/') + .send(req) + .expect(401) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) -tape('call JSON-RPC auth protected server with unprotected method without token', (t) => { - const server = startRPC({}, undefined, { - jwtSecret, - unlessFn: (req: any) => req.body.method.includes('unprotected_'), + it('with nonexistent method', () => { + const server = startRPC({}) + const req = { + jsonrpc: '2.0', + method: 'METHOD_DOES_NOT_EXIST', + params: ['0x1', true], + id: 1, + } + + request(server) + .post('/') + .set('Content-Type', 'application/json') + .send(req) + .expect((res: any) => { + if (res.body.error === undefined) { + throw new Error('should return an error object') + } + if (res.body.error.code !== METHOD_NOT_FOUND) { + throw new Error(`should have an error code ${METHOD_NOT_FOUND}`) + } + }) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) }) - const req = { - jsonrpc: '2.0', - method: 'unprotected_METHOD_DOES_NOT_EXIST', - params: ['0x1', true], - id: 1, - } - - request(server) - .post('/') - .set('Content-Type', 'application/json') - .send(req) - .expect(200) - .end((err: any) => { - closeRPC(server) - t.end(err) + it('auth protected server with unprotected method without token', () => { + const server = startRPC({}, undefined, { + jwtSecret, + unlessFn: (req: any) => 
req.body.method.includes('unprotected_'), }) -}) -tape('call JSON-RPC auth protected server with protected method without token', (t) => { - const server = startRPC({}, undefined, { - jwtSecret, - unlessFn: (req: any) => !(req.body.method as string).includes('protected_'), + const req = { + jsonrpc: '2.0', + method: 'unprotected_METHOD_DOES_NOT_EXIST', + params: ['0x1', true], + id: 1, + } + + request(server) + .post('/') + .set('Content-Type', 'application/json') + .send(req) + .expect(200) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) }) - const req = { - jsonrpc: '2.0', - method: 'protected_METHOD_DOES_NOT_EXIST', - params: ['0x1', true], - id: 1, - } - - request(server) - .post('/') - .set('Content-Type', 'application/json') - .send(req) - .expect(401) - .end((err: any) => { - closeRPC(server) - t.end(err) + it('auth protected server with protected method without token', () => { + const server = startRPC({}, undefined, { + jwtSecret, + unlessFn: (req: any) => !(req.body.method as string).includes('protected_'), }) -}) -tape('call JSON-RPC auth protected server with protected method with token', (t) => { - const server = startRPC({}, undefined, { - jwtSecret, - unlessFn: (req: any) => !(req.body.method as string).includes('protected_'), + const req = { + jsonrpc: '2.0', + method: 'protected_METHOD_DOES_NOT_EXIST', + params: ['0x1', true], + id: 1, + } + + request(server) + .post('/') + .set('Content-Type', 'application/json') + .send(req) + .expect(401) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) }) - const req = { - jsonrpc: '2.0', - method: 'protected_METHOD_DOES_NOT_EXIST', - params: ['0x1', true], - id: 1, - } - const claims = { iat: Math.floor(new Date().getTime() / 1000) } - const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) - - request(server) - .post('/') - .set('Content-Type', 'application/json') - .set('Authorization', `Bearer ${token}`) - .send(req) - .expect(200) - .end((err: 
any) => { - closeRPC(server) - t.end(err) + it('auth protected server with protected method with token', () => { + const server = startRPC({}, undefined, { + jwtSecret, + unlessFn: (req: any) => !(req.body.method as string).includes('protected_'), }) + + const req = { + jsonrpc: '2.0', + method: 'protected_METHOD_DOES_NOT_EXIST', + params: ['0x1', true], + id: 1, + } + const claims = { iat: Math.floor(new Date().getTime() / 1000) } + const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) + + request(server) + .post('/') + .set('Content-Type', 'application/json') + .set('Authorization', `Bearer ${token}`) + .send(req) + .expect(200) + .end((err: any) => { + closeRPC(server) + assert.notOk(err) + }) + }) }) diff --git a/packages/client/test/rpc/txpool/content.spec.ts b/packages/client/test/rpc/txpool/content.spec.ts index 536cfa0802..b04647dcda 100644 --- a/packages/client/test/rpc/txpool/content.spec.ts +++ b/packages/client/test/rpc/txpool/content.spec.ts @@ -1,9 +1,10 @@ import { Block, BlockHeader } from '@ethereumjs/block' import { Blockchain } from '@ethereumjs/blockchain' import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { getGenesis } from '@ethereumjs/genesis' import { TransactionFactory } from '@ethereumjs/tx' import { randomBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, createClient, createManager, params, startRPC } from '../helpers' @@ -11,74 +12,80 @@ import type { FullEthereumService } from '../../../src/service' const method = 'txpool_content' -tape(`${method}: call with valid arguments`, async (t) => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const blockchain = await Blockchain.create({ - common, - validateBlocks: false, - validateConsensus: false, - }) +describe(method, () => { + it( + 'call with valid arguments', + async () => { + const common = new Common({ chain: 
Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const blockchain = await Blockchain.create({ + common, + validateBlocks: false, + validateConsensus: false, + }) - const client = createClient({ blockchain, commonChain: common, includeVM: true }) - const manager = createManager(client) - const server = startRPC(manager.getMethods()) - const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService - t.notEqual(execution, undefined, 'should have valid execution') - const { vm } = execution - await vm.stateManager.generateCanonicalGenesis({}) - const gasLimit = 2000000 - const parent = await blockchain.getCanonicalHeadHeader() - const block = Block.fromBlockData( - { - header: { - parentHash: parent.hash(), - number: 1, - gasLimit, - }, - }, - { common, calcDifficultyFromHeader: parent } - ) - - let ranBlock: Block | undefined = undefined - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) - await vm.blockchain.putBlock(ranBlock!) 
- const service = client.services[0] as FullEthereumService - service.execution.vm.common.setHardfork('london') - service.chain.config.chainCommon.setHardfork('london') - const headBlock = await service.chain.getCanonicalHeadBlock() - const londonBlock = Block.fromBlockData( - { - header: BlockHeader.fromHeaderData( + const client = createClient({ blockchain, commonChain: common, includeVM: true }) + const manager = createManager(client) + const server = startRPC(manager.getMethods()) + const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService + assert.notEqual(execution, undefined, 'should have valid execution') + const { vm } = execution + await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + const gasLimit = 2000000 + const parent = await blockchain.getCanonicalHeadHeader() + const block = Block.fromBlockData( { - baseFeePerGas: 1000000000n, - number: 2n, - parentHash: headBlock.header.hash(), + header: { + parentHash: parent.hash(), + number: 1, + gasLimit, + }, }, + { common, calcDifficultyFromHeader: parent } + ) + + let ranBlock: Block | undefined = undefined + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await vm.blockchain.putBlock(ranBlock!) 
+ const service = client.services[0] as FullEthereumService + service.execution.vm.common.setHardfork('london') + service.chain.config.chainCommon.setHardfork('london') + const headBlock = await service.chain.getCanonicalHeadBlock() + const londonBlock = Block.fromBlockData( { - common: service.chain.config.chainCommon, - skipConsensusFormatValidation: true, - calcDifficultyFromHeader: headBlock.header, - } - ), - }, - { common: service.chain.config.chainCommon } - ) + header: BlockHeader.fromHeaderData( + { + baseFeePerGas: 1000000000n, + number: 2n, + parentHash: headBlock.header.hash(), + }, + { + common: service.chain.config.chainCommon, + skipConsensusFormatValidation: true, + calcDifficultyFromHeader: headBlock.header, + } + ), + }, + { common: service.chain.config.chainCommon } + ) - vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block: londonBlock, generate: true, skipBlockValidation: true }) - await vm.blockchain.putBlock(ranBlock!) - ;(service.txPool as any).validate = () => {} - await service.txPool.add(TransactionFactory.fromTxData({ type: 2 }, {}).sign(randomBytes(32))) + vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) + await vm.runBlock({ block: londonBlock, generate: true, skipBlockValidation: true }) + await vm.blockchain.putBlock(ranBlock!) 
+ ;(service.txPool as any).validate = () => {} + await service.txPool.add(TransactionFactory.fromTxData({ type: 2 }, {}).sign(randomBytes(32))) - const req = params(method, []) - const expectedRes = (res: any) => { - t.equal( - Object.keys(res.body.result.pending).length, - 1, - 'received one pending transaction back from response' - ) - } + const req = params(method, []) + const expectedRes = (res: any) => { + assert.equal( + Object.keys(res.body.result.pending).length, + 1, + 'received one pending transaction back from response' + ) + } - await baseRequest(t, server, req, 200, expectedRes) + await baseRequest(server, req, 200, expectedRes) + }, + { timeout: 30000 } + ) }) diff --git a/packages/client/test/rpc/util.ts b/packages/client/test/rpc/util.ts index fd3197ced2..65eb7eecdc 100644 --- a/packages/client/test/rpc/util.ts +++ b/packages/client/test/rpc/util.ts @@ -1,6 +1,6 @@ -import type * as tape from 'tape' +import { assert } from 'vitest' -export function checkError(t: tape.Test, expectedCode: number, expectedMessage?: string) { +export function checkError(expectedCode: number, expectedMessage?: string) { return (res: any) => { if (res.body.error === undefined) { throw new Error('should return an error object') @@ -16,6 +16,6 @@ export function checkError(t: tape.Test, expectedCode: number, expectedMessage?: `should have an error message "${expectedMessage}", got "${res.body.error.message}"` ) } - t.pass('should return error object with error code and message') + assert.ok(true, 'should return error object with error code and message') } } diff --git a/packages/client/test/rpc/util/CLConnectionManager.spec.ts b/packages/client/test/rpc/util/CLConnectionManager.spec.ts index 1d1cb3c9d0..7ac888a37b 100644 --- a/packages/client/test/rpc/util/CLConnectionManager.spec.ts +++ b/packages/client/test/rpc/util/CLConnectionManager.spec.ts @@ -1,10 +1,10 @@ import { Common, parseGethGenesis } from '@ethereumjs/common' -import * as tape from 'tape' +import { assert, 
describe, it } from 'vitest' import { Config } from '../../../src' import { CLConnectionManager } from '../../../src/rpc/util/CLConnectionManager' import { Event } from '../../../src/types' -import * as genesisJSON from '../../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../../testdata/geth-genesis/post-merge.json' const payload = { payload: { @@ -33,15 +33,14 @@ const update = { }, } -tape('[CLConnectionManager]', (t) => { - t.test('Initialization', async (st) => { - st.plan(5) +describe('[CLConnectionManager]', () => { + it('Initialization', async () => { let config = new Config() let manager = new CLConnectionManager({ config }) manager.start() - st.ok(manager.running, 'should start') + assert.ok(manager.running, 'should start') manager.stop() - st.ok(!manager.running, 'should stop') + assert.ok(!manager.running, 'should stop') const prevMergeForkBlock = (genesisJSON.config as any).mergeForkBlock ;(genesisJSON.config as any).mergeForkBlock = 0 const params = parseGethGenesis(genesisJSON, 'post-merge', false) @@ -52,7 +51,7 @@ tape('[CLConnectionManager]', (t) => { common.setHardforkBy({ blockNumber: 0 }) config = new Config({ common }) manager = new CLConnectionManager({ config }) - st.ok(manager.running, 'starts on instantiation if hardfork is MergeForkBlock') + assert.ok(manager.running, 'starts on instantiation if hardfork is MergeForkBlock') manager.stop() ;(genesisJSON.config as any).mergeForkBlock = 10 common = new Common({ @@ -63,27 +62,26 @@ tape('[CLConnectionManager]', (t) => { manager = new CLConnectionManager({ config }) config.chainCommon.setHardforkBy({ blockNumber: 11 }) config.events.on(Event.CHAIN_UPDATED, () => { - st.ok(manager.running, 'connection manager started on chain update on mergeBlock') + assert.ok(manager.running, 'connection manager started on chain update on mergeBlock') }) config.events.on(Event.CLIENT_SHUTDOWN, () => { - st.ok(!manager.running, 'connection manager stopped on client shutdown') + 
assert.ok(!manager.running, 'connection manager stopped on client shutdown') }) config.events.emit(Event.CHAIN_UPDATED) config.events.emit(Event.CLIENT_SHUTDOWN) - // reset prevMergeForkBlock as it seems to be polluting other tests + // reset prevMergeForkBlock as it seems to be polluting other test ;(genesisJSON.config as any).mergeForkBlock = prevMergeForkBlock ;(genesisJSON.config as any).mergeForkBlock = prevMergeForkBlock }) - t.test('Status updates', async (st) => { - st.plan(2) + it('Status updates', async () => { const config = new Config() const manager = new CLConnectionManager({ config }) config.logger.on('data', (chunk) => { if ((chunk.message as string).includes('consensus forkchoice update head=0x67b9')) { - st.pass('received last fork choice message') + assert.ok(true, 'received last fork choice message') } if ((chunk.message as string).includes('consensus payload received number=55504')) { - st.pass('received last payload message') + assert.ok(true, 'received last payload message') manager.stop() config.logger.removeAllListeners() } diff --git a/packages/client/test/rpc/validation.spec.ts b/packages/client/test/rpc/validation.spec.ts index 0c19395cc5..b3fb1232a8 100644 --- a/packages/client/test/rpc/validation.spec.ts +++ b/packages/client/test/rpc/validation.spec.ts @@ -1,5 +1,5 @@ import { bytesToHex, bytesToUnprefixedHex, randomBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../src/rpc/error-code' import { middleware, validators } from '../../src/rpc/validation' @@ -9,539 +9,651 @@ import { checkError } from './util' const prefix = 'rpc/validation:' -tape(`${prefix} should work without \`params\` when it's optional`, async (t) => { - const mockMethodName = 'mock' - const server = startRPC({ - [mockMethodName]: middleware((_params: any) => true, 0, []), +describe(prefix, () => { + it('should work without `params` when it is optional', async () => { + 
const mockMethodName = 'mock' + const server = startRPC({ + [mockMethodName]: middleware((_params: any) => true, 0, []), + }) + + const req = { + jsonrpc: '2.0', + method: mockMethodName, + id: 1, + } + const expectRes = (res: any) => { + assert.equal(res.body.error, undefined, 'should not return an error object') + } + await baseRequest(server, req, 200, expectRes) }) - const req = { - jsonrpc: '2.0', - method: mockMethodName, - id: 1, - } - const expectRes = (res: any) => { - t.equal(res.body.error, undefined, 'should not return an error object') - } - await baseRequest(t, server, req, 200, expectRes) -}) + it('should return error without `params` when it is required', async () => { + const mockMethodName = 'mock' + const server = startRPC({ + [mockMethodName]: middleware((_params: any) => true, 1, []), + }) -tape(`${prefix} should return error without \`params\` when it's required`, async (t) => { - const mockMethodName = 'mock' - const server = startRPC({ - [mockMethodName]: middleware((_params: any) => true, 1, []), - }) + const req = { + jsonrpc: '2.0', + method: mockMethodName, + id: 1, + } - const req = { - jsonrpc: '2.0', - method: mockMethodName, - id: 1, - } + const expectRes = checkError(INVALID_PARAMS, 'missing value for required argument 0') - const expectRes = checkError(t, INVALID_PARAMS, 'missing value for required argument 0') + await baseRequest(server, req, 200, expectRes) + }) - await baseRequest(t, server, req, 200, expectRes) -}) + const validatorResult = (result: Object | undefined) => { + // result is valid if validator returns undefined + // result is invalid if validator returns object + return result === undefined ? true : false + } -const validatorResult = (result: Object | undefined) => { - // result is valid if validator returns undefined - // result is invalid if validator returns object - return result === undefined ? 
true : false -} - -tape(`${prefix} address`, (t) => { - // valid - // zero address - t.ok(validatorResult(validators.address(['0x0000000000000000000000000000000000000000'], 0))) - // lowercase address - t.ok(validatorResult(validators.address(['0xa7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270'], 0))) - // checksummed address - t.ok(validatorResult(validators.address(['0xa7d8d9ef8D8Ce8992Df33D8b8CF4Aebabd5bD270'], 0))) - - // invalid - t.notOk(validatorResult(validators.address(['0x'], 0))) - t.notOk(validatorResult(validators.address(['0x0'], 0))) - t.notOk(validatorResult(validators.address(['0x00'], 0))) - t.notOk(validatorResult(validators.address(['0x1'], 0))) - // invalid length: 38 chars - t.notOk(validatorResult(validators.address(['0x00000000000000000000000000000000000000'], 0))) - // invalidlength: 39 chars - t.notOk(validatorResult(validators.address(['0x000000000000000000000000000000000000000'], 0))) - // invalidlength: 41 chars - t.notOk(validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0))) - // invalid length: 42 chars - t.notOk(validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0))) - // invalid character - t.notOk(validatorResult(validators.address(['0x62223651d6a33d58be70eb9876c3caf7096169ez'], 0))) - - t.end() -}) + const bytes = (byteLength: number, prefix: boolean = true) => { + return prefix ? 
'0x'.padEnd(byteLength * 2 + 2, '0') : ''.padEnd(byteLength * 2, '0') + } + const badhex = (byteLength: number) => { + return '0x'.padEnd(byteLength * 2 + 2, 'G') + } -tape(`${prefix} blockHash`, (t) => { - // valid - t.ok( - validatorResult( - validators.blockHash( - ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) + it('address', () => { + // valid + // zero address + assert.ok( + validatorResult(validators.address(['0x0000000000000000000000000000000000000000'], 0)) ) - ) - t.ok( - validatorResult( - validators.blockHash( - ['0xf79d019c58d58a4efcfdf100c9596dd38014dcec6cf6f52000d4fae4e139b703'], - 0 - ) + // lowercase address + assert.ok( + validatorResult(validators.address(['0xa7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270'], 0)) ) - ) - // invalid length - t.notOk( - validatorResult( - validators.blockHash(['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2'], 0) - ) - ) - t.notOk( - validatorResult( - validators.blockHash(['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24'], 0) - ) - ) - t.notOk( - validatorResult( - validators.blockHash( - ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2499'], - 0 - ) + // checksummed address + assert.ok( + validatorResult(validators.address(['0xa7d8d9ef8D8Ce8992Df33D8b8CF4Aebabd5bD270'], 0)) ) - ) - t.notOk( - validatorResult( - validators.blockHash( - ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24999'], - 0 - ) + + // invalid + assert.notOk(validatorResult(validators.address(['0x'], 0))) + assert.notOk(validatorResult(validators.address(['0x0'], 0))) + assert.notOk(validatorResult(validators.address(['0x00'], 0))) + assert.notOk(validatorResult(validators.address(['0x1'], 0))) + // invalid length: 38 chars + assert.notOk( + validatorResult(validators.address(['0x00000000000000000000000000000000000000'], 0)) ) - ) - // invalid character - t.notOk( - validatorResult( - validators.blockHash( - 
['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66z249'], - 0 - ) + // invalidlength: 39 chars + assert.notOk( + validatorResult(validators.address(['0x000000000000000000000000000000000000000'], 0)) ) - ) - - t.end() -}) - -tape(`${prefix} blockOption`, (t) => { - // valid - t.ok(validatorResult(validators.blockOption(['latest'], 0))) - t.ok(validatorResult(validators.blockOption(['earliest'], 0))) - t.ok(validatorResult(validators.blockOption(['pending'], 0))) - t.ok( - validatorResult( - validators.blockOption( - ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) + // invalidlength: 41 chars + assert.notOk( + validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)) ) - ) - t.ok(validatorResult(validators.blockOption(['0x1'], 0))) - t.ok(validatorResult(validators.blockOption(['0x01'], 0))) - - // invalid - t.notOk(validatorResult(validators.blockOption(['lates'], 0))) - t.notOk(validatorResult(validators.blockOption(['arliest'], 0))) - t.notOk(validatorResult(validators.blockOption(['pendin'], 0))) - t.notOk(validatorResult(validators.blockOption(['0'], 0))) - t.notOk(validatorResult(validators.blockOption(['00'], 0))) - t.notOk(validatorResult(validators.blockOption(['1'], 0))) - t.notOk(validatorResult(validators.blockOption(['11'], 0))) - t.notOk( - validatorResult( - validators.blockOption( - ['573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) + // invalid length: 42 chars + assert.notOk( + validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)) ) - ) - - t.end() -}) - -tape(`${prefix} bool`, (t) => { - // valid - t.ok(validatorResult(validators.bool([true], 0))) - t.ok(validatorResult(validators.bool([false], 0))) - - // invalid - t.notOk(validatorResult(validators.bool(['true'], 0))) - t.notOk(validatorResult(validators.bool(['false'], 0))) - t.notOk(validatorResult(validators.bool(['tru'], 0))) - 
t.notOk(validatorResult(validators.bool(['fals'], 0))) - - t.end() -}) - -tape(`${prefix} hex`, (t) => { - // valid - t.ok(validatorResult(validators.hex(['0x0'], 0))) - t.ok(validatorResult(validators.hex(['0x00'], 0))) - t.ok(validatorResult(validators.hex(['0x1'], 0))) + // invalid character + assert.notOk( + validatorResult(validators.address(['0x62223651d6a33d58be70eb9876c3caf7096169ez'], 0)) + ) + assert.ok(validatorResult(validators.bytes8([bytesToHex(randomBytes(8))], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(1)], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(2)], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(4)], 0))) + // invalid + assert.notOk(validatorResult(validators.bytes8([bytes(10)], 0))) + assert.notOk(validatorResult(validators.bytes8([bytes(8, false)], 0))) + assert.notOk(validatorResult(validators.bytes8([bytesToUnprefixedHex(randomBytes(8))], 0))) + }) - // invalid - t.notOk(validatorResult(validators.hex(['0'], 0))) - t.notOk(validatorResult(validators.hex(['00'], 0))) - t.notOk(validatorResult(validators.hex(['1'], 0))) - t.notOk(validatorResult(validators.hex(['1'], 0))) + it('Uint64', () => { + // valid + assert.ok(validatorResult(validators.uint64([bytesToHex(randomBytes(8))], 0))) + assert.ok(validatorResult(validators.uint64([bytes(8)], 0))) + assert.ok(validatorResult(validators.uint64([bytes(1)], 0))) + assert.ok(validatorResult(validators.uint64([bytes(2)], 0))) + assert.ok(validatorResult(validators.uint64([bytes(4)], 0))) - t.end() -}) -tape(`${prefix} byteVectors`, (t) => { - const bytes = (byteLength: number, prefix: boolean = true) => { - return prefix ? 
'0x'.padEnd(byteLength * 2 + 2, '0') : ''.padEnd(byteLength * 2, '0') - } - const badhex = (byteLength: number) => { - return '0x'.padEnd(byteLength * 2 + 2, 'G') - } - t.test('Bytes8', (st) => { + // invalid + assert.notOk(validatorResult(validators.bytes8([badhex(8)], 0))) + assert.notOk(validatorResult(validators.uint64([bytes(10)], 0))) + assert.notOk(validatorResult(validators.uint64([bytes(8, false)], 0))) + assert.notOk(validatorResult(validators.uint64([bytesToUnprefixedHex(randomBytes(8))], 0))) + }) + it('Bytes16', () => { // valid - st.ok(validatorResult(validators.bytes8([bytesToHex(randomBytes(8))], 0))) - st.ok(validatorResult(validators.bytes8([bytes(8)], 0))) - st.ok(validatorResult(validators.bytes8([bytes(1)], 0))) - st.ok(validatorResult(validators.bytes8([bytes(2)], 0))) - st.ok(validatorResult(validators.bytes8([bytes(4)], 0))) + assert.ok(validatorResult(validators.bytes16([bytesToHex(randomBytes(16))], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(16)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(1)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(2)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(4)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(8)], 0))) // invalid - st.notOk(validatorResult(validators.bytes8([bytes(10)], 0))) - st.notOk(validatorResult(validators.bytes8([bytes(8, false)], 0))) - st.notOk(validatorResult(validators.bytes8([bytesToUnprefixedHex(randomBytes(8))], 0))) - st.end() + assert.notOk(validatorResult(validators.bytes16([badhex(16)], 0))) + assert.notOk(validatorResult(validators.bytes16([bytes(20)], 0))) + assert.notOk(validatorResult(validators.bytes16([bytes(16, false)], 0))) + assert.notOk(validatorResult(validators.bytes16([bytesToUnprefixedHex(randomBytes(16))], 0))) }) - t.test('Uint64', (st) => { + it('Bytes20', () => { // valid - st.ok(validatorResult(validators.uint64([bytesToHex(randomBytes(8))], 0))) - 
st.ok(validatorResult(validators.uint64([bytes(8)], 0))) - st.ok(validatorResult(validators.uint64([bytes(1)], 0))) - st.ok(validatorResult(validators.uint64([bytes(2)], 0))) - st.ok(validatorResult(validators.uint64([bytes(4)], 0))) - + assert.ok(validatorResult(validators.bytes20([bytes(20)], 0))) + assert.ok(validatorResult(validators.bytes20([bytesToHex(randomBytes(20))], 0))) + assert.ok(validatorResult(validators.bytes20([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes20([bytes(16)], 0))) // invalid - st.notOk(validatorResult(validators.bytes8([badhex(8)], 0))) - st.notOk(validatorResult(validators.uint64([bytes(10)], 0))) - st.notOk(validatorResult(validators.uint64([bytes(8, false)], 0))) - st.notOk(validatorResult(validators.uint64([bytesToUnprefixedHex(randomBytes(8))], 0))) - st.end() + assert.notOk(validatorResult(validators.bytes20([badhex(20)], 0))) + assert.notOk(validatorResult(validators.bytes20([bytes(20, false)], 0))) + assert.notOk(validatorResult(validators.bytes20([bytes(32)], 0))) + assert.notOk(validatorResult(validators.bytes20([bytesToUnprefixedHex(randomBytes(20))], 0))) }) - t.test('Bytes16', (st) => { + it('Bytes32', () => { // valid - st.ok(validatorResult(validators.bytes16([bytesToHex(randomBytes(16))], 0))) - st.ok(validatorResult(validators.bytes16([bytes(16)], 0))) - st.ok(validatorResult(validators.bytes16([bytes(1)], 0))) - st.ok(validatorResult(validators.bytes16([bytes(2)], 0))) - st.ok(validatorResult(validators.bytes16([bytes(4)], 0))) - st.ok(validatorResult(validators.bytes16([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes32([bytesToHex(randomBytes(32))], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(32)], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(16)], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(20)], 0))) // invalid - st.notOk(validatorResult(validators.bytes16([badhex(16)], 0))) - 
st.notOk(validatorResult(validators.bytes16([bytes(20)], 0))) - st.notOk(validatorResult(validators.bytes16([bytes(16, false)], 0))) - st.notOk(validatorResult(validators.bytes16([bytesToUnprefixedHex(randomBytes(16))], 0))) - st.end() + assert.notOk(validatorResult(validators.bytes32([badhex(32)], 0))) + assert.notOk(validatorResult(validators.bytes32([bytes(48)], 0))) + assert.notOk(validatorResult(validators.bytes32([bytes(32, false)], 0))) + assert.notOk(validatorResult(validators.bytes32([bytesToUnprefixedHex(randomBytes(32))], 0))) }) - t.test('Bytes20', (st) => { + it('Uint256', () => { // valid - st.ok(validatorResult(validators.bytes20([bytes(20)], 0))) - st.ok(validatorResult(validators.bytes20([bytesToHex(randomBytes(20))], 0))) - st.ok(validatorResult(validators.bytes20([bytes(8)], 0))) - st.ok(validatorResult(validators.bytes20([bytes(16)], 0))) + assert.ok(validatorResult(validators.uint256([bytesToHex(randomBytes(32))], 0))) + assert.ok(validatorResult(validators.uint256([bytes(32)], 0))) + assert.ok(validatorResult(validators.uint256([bytes(8)], 0))) + assert.ok(validatorResult(validators.uint256([bytes(16)], 0))) + assert.ok(validatorResult(validators.uint256([bytes(20)], 0))) // invalid - st.notOk(validatorResult(validators.bytes20([badhex(20)], 0))) - st.notOk(validatorResult(validators.bytes20([bytes(20, false)], 0))) - st.notOk(validatorResult(validators.bytes20([bytes(32)], 0))) - st.notOk(validatorResult(validators.bytes20([bytesToUnprefixedHex(randomBytes(20))], 0))) - st.end() + assert.notOk(validatorResult(validators.uint256([badhex(32)], 0))) + assert.notOk(validatorResult(validators.uint256([bytes(48)], 0))) + assert.notOk(validatorResult(validators.uint256([bytes(32, false)], 0))) + assert.notOk(validatorResult(validators.uint256([bytesToUnprefixedHex(randomBytes(32))], 0))) }) - t.test('Bytes32', (st) => { + it('Bytes48', () => { // valid - st.ok(validatorResult(validators.bytes32([bytesToHex(randomBytes(32))], 0))) - 
st.ok(validatorResult(validators.bytes32([bytes(32)], 0))) - st.ok(validatorResult(validators.bytes32([bytes(8)], 0))) - st.ok(validatorResult(validators.bytes32([bytes(16)], 0))) - st.ok(validatorResult(validators.bytes32([bytes(20)], 0))) + assert.ok(validatorResult(validators.bytes48([bytesToHex(randomBytes(48))], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(48)], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(16)], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(20)], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(32)], 0))) + // invalid - st.notOk(validatorResult(validators.bytes32([badhex(32)], 0))) - st.notOk(validatorResult(validators.bytes32([bytes(48)], 0))) - st.notOk(validatorResult(validators.bytes32([bytes(32, false)], 0))) - st.notOk(validatorResult(validators.bytes32([bytesToUnprefixedHex(randomBytes(32))], 0))) - st.end() + assert.notOk(validatorResult(validators.bytes48([badhex(48)], 0))) + assert.notOk(validatorResult(validators.bytes48([bytes(64)], 0))) + assert.notOk(validatorResult(validators.bytes48([bytes(48, false)], 0))) + assert.notOk(validatorResult(validators.bytes48([bytesToUnprefixedHex(randomBytes(48))], 0))) }) - t.test('Uint256', (st) => { + it('Bytes256', () => { // valid - st.ok(validatorResult(validators.uint256([bytesToHex(randomBytes(32))], 0))) - st.ok(validatorResult(validators.uint256([bytes(32)], 0))) - st.ok(validatorResult(validators.uint256([bytes(8)], 0))) - st.ok(validatorResult(validators.uint256([bytes(16)], 0))) - st.ok(validatorResult(validators.uint256([bytes(20)], 0))) + assert.ok(validatorResult(validators.bytes256([bytesToHex(randomBytes(256))], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(256)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(16)], 0))) + 
assert.ok(validatorResult(validators.bytes256([bytes(32)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(64)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(128)], 0))) + // invalid - st.notOk(validatorResult(validators.uint256([badhex(32)], 0))) - st.notOk(validatorResult(validators.uint256([bytes(48)], 0))) - st.notOk(validatorResult(validators.uint256([bytes(32, false)], 0))) - st.notOk(validatorResult(validators.uint256([bytesToUnprefixedHex(randomBytes(32))], 0))) - st.end() + assert.notOk(validatorResult(validators.bytes256([badhex(256)], 0))) + assert.notOk(validatorResult(validators.bytes256([bytes(512)], 0))) + assert.notOk(validatorResult(validators.bytes256([bytes(256, false)], 0))) + assert.notOk(validatorResult(validators.bytes256([bytesToUnprefixedHex(randomBytes(256))], 0))) + }) + + it('blockHash', () => { + // valid + assert.ok( + validatorResult( + validators.blockHash( + ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], + 0 + ) + ) + ) + assert.ok( + validatorResult( + validators.blockHash( + ['0xf79d019c58d58a4efcfdf100c9596dd38014dcec6cf6f52000d4fae4e139b703'], + 0 + ) + ) + ) + // invalid length + assert.notOk( + validatorResult( + validators.blockHash( + ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2'], + 0 + ) + ) + ) + assert.notOk( + validatorResult( + validators.blockHash( + ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24'], + 0 + ) + ) + ) + assert.notOk( + validatorResult( + validators.blockHash( + ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2499'], + 0 + ) + ) + ) + assert.notOk( + validatorResult( + validators.blockHash( + ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24999'], + 0 + ) + ) + ) + // invalid character + assert.notOk( + validatorResult( + validators.blockHash( + ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66z249'], + 0 + ) + ) + ) }) - t.test('Bytes48', (st) => { + + 
it('blockOption', () => { // valid - st.ok(validatorResult(validators.bytes48([bytesToHex(randomBytes(48))], 0))) - st.ok(validatorResult(validators.bytes48([bytes(48)], 0))) - st.ok(validatorResult(validators.bytes48([bytes(8)], 0))) - st.ok(validatorResult(validators.bytes48([bytes(16)], 0))) - st.ok(validatorResult(validators.bytes48([bytes(20)], 0))) - st.ok(validatorResult(validators.bytes48([bytes(32)], 0))) + assert.ok(validatorResult(validators.blockOption(['latest'], 0))) + assert.ok(validatorResult(validators.blockOption(['earliest'], 0))) + assert.ok(validatorResult(validators.blockOption(['pending'], 0))) + assert.ok( + validatorResult( + validators.blockOption( + ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], + 0 + ) + ) + ) + assert.ok(validatorResult(validators.blockOption(['0x1'], 0))) + assert.ok(validatorResult(validators.blockOption(['0x01'], 0))) // invalid - st.notOk(validatorResult(validators.bytes48([badhex(48)], 0))) - st.notOk(validatorResult(validators.bytes48([bytes(64)], 0))) - st.notOk(validatorResult(validators.bytes48([bytes(48, false)], 0))) - st.notOk(validatorResult(validators.bytes48([bytesToUnprefixedHex(randomBytes(48))], 0))) - st.end() + assert.notOk(validatorResult(validators.blockOption(['lates'], 0))) + assert.notOk(validatorResult(validators.blockOption(['arliest'], 0))) + assert.notOk(validatorResult(validators.blockOption(['pendin'], 0))) + assert.notOk(validatorResult(validators.blockOption(['0'], 0))) + assert.notOk(validatorResult(validators.blockOption(['00'], 0))) + assert.notOk(validatorResult(validators.blockOption(['1'], 0))) + assert.notOk(validatorResult(validators.blockOption(['11'], 0))) + assert.notOk( + validatorResult( + validators.blockOption( + ['573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], + 0 + ) + ) + ) }) - t.test('Bytes256', (st) => { + + it('bool', () => { // valid - st.ok(validatorResult(validators.bytes256([bytesToHex(randomBytes(256))], 0))) - 
st.ok(validatorResult(validators.bytes256([bytes(256)], 0))) - st.ok(validatorResult(validators.bytes256([bytes(8)], 0))) - st.ok(validatorResult(validators.bytes256([bytes(16)], 0))) - st.ok(validatorResult(validators.bytes256([bytes(32)], 0))) - st.ok(validatorResult(validators.bytes256([bytes(64)], 0))) - st.ok(validatorResult(validators.bytes256([bytes(128)], 0))) + assert.ok(validatorResult(validators.bool([true], 0))) + assert.ok(validatorResult(validators.bool([false], 0))) // invalid - st.notOk(validatorResult(validators.bytes256([badhex(256)], 0))) - st.notOk(validatorResult(validators.bytes256([bytes(512)], 0))) - st.notOk(validatorResult(validators.bytes256([bytes(256, false)], 0))) - st.notOk(validatorResult(validators.bytes256([bytesToUnprefixedHex(randomBytes(256))], 0))) - st.end() + assert.notOk(validatorResult(validators.bool(['true'], 0))) + assert.notOk(validatorResult(validators.bool(['false'], 0))) + assert.notOk(validatorResult(validators.bool(['tru'], 0))) + assert.notOk(validatorResult(validators.bool(['fals'], 0))) }) - t.end() -}) + it('hex', () => { + // valid + assert.ok(validatorResult(validators.hex(['0x0'], 0))) + assert.ok(validatorResult(validators.hex(['0x00'], 0))) + assert.ok(validatorResult(validators.hex(['0x1'], 0))) + + // invalid + assert.notOk(validatorResult(validators.hex(['0'], 0))) + assert.notOk(validatorResult(validators.hex(['00'], 0))) + assert.notOk(validatorResult(validators.hex(['1'], 0))) + assert.notOk(validatorResult(validators.hex(['1'], 0))) + }) + describe('byteVectors', () => { + const bytes = (byteLength: number, prefix: boolean = true) => { + return prefix ? 
'0x'.padEnd(byteLength * 2 + 2, '0') : ''.padEnd(byteLength * 2, '0') + } + const badhex = (byteLength: number) => { + return '0x'.padEnd(byteLength * 2 + 2, 'G') + } + it('Bytes8', () => { + // valid + assert.ok(validatorResult(validators.bytes8([bytesToHex(randomBytes(8))], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(1)], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(2)], 0))) + assert.ok(validatorResult(validators.bytes8([bytes(4)], 0))) + // invalid + assert.notOk(validatorResult(validators.bytes8([bytes(10)], 0))) + assert.notOk(validatorResult(validators.bytes8([bytes(8, false)], 0))) + }) + it('Uint64', () => { + // valid + assert.ok(validatorResult(validators.uint64([bytesToHex(randomBytes(8))], 0))) + assert.ok(validatorResult(validators.uint64([bytes(8)], 0))) + assert.ok(validatorResult(validators.uint64([bytes(1)], 0))) + assert.ok(validatorResult(validators.uint64([bytes(2)], 0))) + assert.ok(validatorResult(validators.uint64([bytes(4)], 0))) + + // invalid + assert.notOk(validatorResult(validators.bytes8([badhex(8)], 0))) + assert.notOk(validatorResult(validators.uint64([bytes(10)], 0))) + assert.notOk(validatorResult(validators.uint64([bytes(8, false)], 0))) + }) + it('Bytes16', () => { + // valid + assert.ok(validatorResult(validators.bytes16([bytesToHex(randomBytes(16))], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(16)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(1)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(2)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(4)], 0))) + assert.ok(validatorResult(validators.bytes16([bytes(8)], 0))) + // invalid + assert.notOk(validatorResult(validators.bytes16([badhex(16)], 0))) + assert.notOk(validatorResult(validators.bytes16([bytes(20)], 0))) + assert.notOk(validatorResult(validators.bytes16([bytes(16, false)], 0))) + }) + it('Bytes20', () => { + // valid + 
assert.ok(validatorResult(validators.bytes20([bytes(20)], 0))) + assert.ok(validatorResult(validators.bytes20([bytesToHex(randomBytes(20))], 0))) + assert.ok(validatorResult(validators.bytes20([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes20([bytes(16)], 0))) + // invalid + assert.notOk(validatorResult(validators.bytes20([badhex(20)], 0))) + assert.notOk(validatorResult(validators.bytes20([bytes(20, false)], 0))) + assert.notOk(validatorResult(validators.bytes20([bytes(32)], 0))) + }) + it('Bytes32', () => { + // valid + assert.ok(validatorResult(validators.bytes32([bytesToHex(randomBytes(32))], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(32)], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(16)], 0))) + assert.ok(validatorResult(validators.bytes32([bytes(20)], 0))) + // invalid + assert.notOk(validatorResult(validators.bytes32([badhex(32)], 0))) + assert.notOk(validatorResult(validators.bytes32([bytes(48)], 0))) + assert.notOk(validatorResult(validators.bytes32([bytes(32, false)], 0))) + }) + it('Uint256', () => { + // valid + assert.ok(validatorResult(validators.uint256([bytesToHex(randomBytes(32))], 0))) + assert.ok(validatorResult(validators.uint256([bytes(32)], 0))) + assert.ok(validatorResult(validators.uint256([bytes(8)], 0))) + assert.ok(validatorResult(validators.uint256([bytes(16)], 0))) + assert.ok(validatorResult(validators.uint256([bytes(20)], 0))) + // invalid + assert.notOk(validatorResult(validators.uint256([badhex(32)], 0))) + assert.notOk(validatorResult(validators.uint256([bytes(48)], 0))) + assert.notOk(validatorResult(validators.uint256([bytes(32, false)], 0))) + }) + it('Bytes48', () => { + // valid + assert.ok(validatorResult(validators.bytes48([bytesToHex(randomBytes(48))], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(48)], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(8)], 0))) + 
assert.ok(validatorResult(validators.bytes48([bytes(16)], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(20)], 0))) + assert.ok(validatorResult(validators.bytes48([bytes(32)], 0))) + + // invalid + assert.notOk(validatorResult(validators.bytes48([badhex(48)], 0))) + assert.notOk(validatorResult(validators.bytes48([bytes(64)], 0))) + assert.notOk(validatorResult(validators.bytes48([bytes(48, false)], 0))) + }) + it('Bytes256', () => { + // valid + assert.ok(validatorResult(validators.bytes256([bytesToHex(randomBytes(256))], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(256)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(8)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(16)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(32)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(64)], 0))) + assert.ok(validatorResult(validators.bytes256([bytes(128)], 0))) + + // invalid + assert.notOk(validatorResult(validators.bytes256([badhex(256)], 0))) + assert.notOk(validatorResult(validators.bytes256([bytes(512)], 0))) + assert.notOk(validatorResult(validators.bytes256([bytes(256, false)], 0))) + }) + }) -tape(`${prefix} transaction`, (t) => { - // valid - t.ok(validatorResult(validators.transaction([])([{}], 0))) - t.ok( - validatorResult( - validators.transaction([])( - [ - { - gas: '0xcf08', - }, - ], - 0 + it('transaction', () => { + // valid + assert.ok(validatorResult(validators.transaction([])([{}], 0))) + assert.ok( + validatorResult( + validators.transaction([])( + [ + { + gas: '0xcf08', + }, + ], + 0 + ) ) ) - ) - t.ok( - validatorResult( - validators.transaction(['to'])([{ to: '0x0000000000000000000000000000000000000000' }], 0) - ) - ) - - // invalid - t.notOk(validatorResult(validators.transaction([])([], 0))) - t.notOk(validatorResult(validators.transaction(['to'])([{}], 0))) - t.notOk(validatorResult(validators.transaction(['to'])([{ gas: '0xcf08' }], 0))) - 
t.notOk(validatorResult(validators.transaction(['to'])([{ to: '0x' }], 0))) - t.notOk(validatorResult(validators.transaction(['to'])([{ to: '0x0' }], 0))) - t.notOk(validatorResult(validators.transaction(['to'])([{ to: '0x00' }], 0))) - t.notOk( - validatorResult( - validators.transaction(['to'])( - [ - { - to: '0x0000000000000000000000000000000000000000', - from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249', - }, - ], - 0 + assert.ok( + validatorResult( + validators.transaction(['to'])([{ to: '0x0000000000000000000000000000000000000000' }], 0) ) ) - ) - t.notOk( - validatorResult( - validators.transaction(['to'])( - [{ from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249' }], - 0 + + // invalid + assert.notOk(validatorResult(validators.transaction([])([], 0))) + assert.notOk(validatorResult(validators.transaction(['to'])([{}], 0))) + assert.notOk(validatorResult(validators.transaction(['to'])([{ gas: '0xcf08' }], 0))) + assert.notOk(validatorResult(validators.transaction(['to'])([{ to: '0x' }], 0))) + assert.notOk(validatorResult(validators.transaction(['to'])([{ to: '0x0' }], 0))) + assert.notOk(validatorResult(validators.transaction(['to'])([{ to: '0x00' }], 0))) + assert.notOk( + validatorResult( + validators.transaction(['to'])( + [ + { + to: '0x0000000000000000000000000000000000000000', + from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249', + }, + ], + 0 + ) ) ) - ) - t.notOk(validatorResult(validators.transaction([])([{ gas: '12' }], 0))) - t.notOk(validatorResult(validators.transaction([])([{ gasPrice: '12' }], 0))) - t.notOk(validatorResult(validators.transaction([])([{ value: '12' }], 0))) - t.notOk(validatorResult(validators.transaction([])([{ data: '12' }], 0))) - - t.end() -}) - -tape(`${prefix} object`, (t) => { - // valid - t.ok( - validatorResult( - validators.object({ - address: validators.address, - blockHash: validators.blockHash, - bool: validators.bool, - hex: validators.hex, - 
})( - [ - { - address: '0x25ed58c027921e14d86380ea2646e3a1b5c55a8b', - blockHash: '0x4152dae052dceaeaeff588aec17a88679fc61aa0c0ca362a2572b94f9c542b24', - bool: true, - hex: '0x1', - }, - ], - 0 + assert.notOk( + validatorResult( + validators.transaction(['to'])( + [{ from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249' }], + 0 + ) ) ) - ) + assert.notOk(validatorResult(validators.transaction([])([{ gas: '12' }], 0))) + assert.notOk(validatorResult(validators.transaction([])([{ gasPrice: '12' }], 0))) + assert.notOk(validatorResult(validators.transaction([])([{ value: '12' }], 0))) + assert.notOk(validatorResult(validators.transaction([])([{ data: '12' }], 0))) + }) - // invalid - t.notOk( - validatorResult(validators.object({ address: validators.address })([{ address: '0x0' }], 0)) - ) - t.notOk( - validatorResult( - validators.object({ blockHash: validators.blockHash })([{ blockHash: '0x0' }], 0) + it('object', () => { + // valid + assert.ok( + validatorResult( + validators.object({ + address: validators.address, + blockHash: validators.blockHash, + bool: validators.bool, + hex: validators.hex, + })( + [ + { + address: '0x25ed58c027921e14d86380ea2646e3a1b5c55a8b', + blockHash: '0x4152dae052dceaeaeff588aec17a88679fc61aa0c0ca362a2572b94f9c542b24', + bool: true, + hex: '0x1', + }, + ], + 0 + ) + ) ) - ) - t.notOk(validatorResult(validators.object({ bool: validators.bool })([{ bool: '0x0' }], 0))) - t.notOk(validatorResult(validators.object({ hex: validators.hex })([{ hex: '1' }], 0))) - t.end() -}) + // invalid + assert.notOk( + validatorResult(validators.object({ address: validators.address })([{ address: '0x0' }], 0)) + ) + assert.notOk( + validatorResult( + validators.object({ blockHash: validators.blockHash })([{ blockHash: '0x0' }], 0) + ) + ) + assert.notOk( + validatorResult(validators.object({ bool: validators.bool })([{ bool: '0x0' }], 0)) + ) + assert.notOk(validatorResult(validators.object({ hex: validators.hex })([{ hex: '1' }], 0))) 
+ }) -tape(`${prefix} array`, (t) => { - // valid - t.ok(validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2']], 0))) - t.ok( - validatorResult( - validators.array(validators.address)( - [ + it('array', () => { + // valid + assert.ok(validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2']], 0))) + assert.ok( + validatorResult( + validators.array(validators.address)( [ - '0xb7e390864a90b7b923c9f9310c6f98aafe43f707', - '0xda4a22ad0d0e9aff0846ca54225637ada5bf7a14', + [ + '0xb7e390864a90b7b923c9f9310c6f98aafe43f707', + '0xda4a22ad0d0e9aff0846ca54225637ada5bf7a14', + ], ], - ], - 0 + 0 + ) ) ) - ) - t.ok( - validatorResult( - validators.array(validators.blockHash)( - [['0xb6dbbc1c702583de187e1284a00a23f9d322bf96f70fd4968b6339d0ace066b3']], - 0 + assert.ok( + validatorResult( + validators.array(validators.blockHash)( + [['0xb6dbbc1c702583de187e1284a00a23f9d322bf96f70fd4968b6339d0ace066b3']], + 0 + ) ) ) - ) - t.ok(validatorResult(validators.array(validators.bool)([[true, false]], 0))) + assert.ok(validatorResult(validators.array(validators.bool)([[true, false]], 0))) - // invalid - t.notOk(validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2', 'true']], 0))) - t.notOk( - validatorResult( - validators.array(validators.address)( - [['0xb7e390864a90b7b923c9f9310c6f98aafe43f707', '0x0']], - 0 + // invalid + assert.notOk( + validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2', 'true']], 0)) + ) + assert.notOk( + validatorResult( + validators.array(validators.address)( + [['0xb7e390864a90b7b923c9f9310c6f98aafe43f707', '0x0']], + 0 + ) ) ) - ) - t.notOk(validatorResult(validators.array(validators.blockHash)([['0xb6dbbc1cd0ace066b3']], 0))) - t.notOk(validatorResult(validators.array(validators.bool)([['0x123', '0x456', '0x789']], 0))) - t.notOk(validatorResult(validators.array(validators.bool)([[true, 'true']], 0))) + assert.notOk( + 
validatorResult(validators.array(validators.blockHash)([['0xb6dbbc1cd0ace066b3']], 0)) + ) + assert.notOk( + validatorResult(validators.array(validators.bool)([['0x123', '0x456', '0x789']], 0)) + ) + assert.notOk(validatorResult(validators.array(validators.bool)([[true, 'true']], 0))) + }) - t.end() -}) + it('values', () => { + // valid + assert.ok(validatorResult(validators.values(['VALID', 'INVALID'])(['VALID'], 0))) + assert.ok(validatorResult(validators.values(['VALID', 'INVALID'])(['INVALID'], 0))) -tape(`${prefix} values`, (t) => { - // valid - t.ok(validatorResult(validators.values(['VALID', 'INVALID'])(['VALID'], 0))) - t.ok(validatorResult(validators.values(['VALID', 'INVALID'])(['INVALID'], 0))) + // invalid + assert.notOk(validatorResult(validators.values(['VALID', 'INVALID'])(['ANOTHER'], 0))) + assert.notOk(validatorResult(validators.values(['VALID', 'INVALID'])(['valid'], 0))) + }) - // invalid - t.notOk(validatorResult(validators.values(['VALID', 'INVALID'])(['ANOTHER'], 0))) - t.notOk(validatorResult(validators.values(['VALID', 'INVALID'])(['valid'], 0))) + it('optional', () => { + // valid + assert.ok(validatorResult(validators.optional(validators.bool)([true], 0))) + assert.ok(validatorResult(validators.optional(validators.bool)([], 0))) + assert.ok(validatorResult(validators.optional(validators.blockHash)([''], 0))) + assert.ok(validatorResult(validators.optional(validators.blockHash)([], 0))) + assert.ok( + validatorResult( + validators.optional(validators.blockHash)( + ['0x0000000000000000000000000000000000000000000000000000000000000000'], + 0 + ) + ) + ) + assert.ok( + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['INVALID'], 0)) + ) + assert.ok( + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([''], 0)) + ) + assert.ok(validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([], 0))) - t.end() -}) + // invalid + 
assert.notOk(validatorResult(validators.optional(validators.bool)(['hey'], 0))) + assert.notOk(validatorResult(validators.optional(validators.blockHash)(['0x0'], 0))) + assert.notOk( + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['ANOTHER'], 0)) + ) + }) -tape(`${prefix} optional`, (t) => { - // valid - t.ok(validatorResult(validators.optional(validators.bool)([true], 0))) - t.ok(validatorResult(validators.optional(validators.bool)([], 0))) - t.ok(validatorResult(validators.optional(validators.blockHash)([''], 0))) - t.ok(validatorResult(validators.optional(validators.blockHash)([], 0))) - t.ok( - validatorResult( - validators.optional(validators.blockHash)( - ['0x0000000000000000000000000000000000000000000000000000000000000000'], - 0 + it('either', () => { + // valid + assert.ok(validatorResult(validators.either(validators.bool, validators.blockHash)([true], 0))) + assert.ok(validatorResult(validators.either(validators.bool, validators.hex)(['0xaaa'], 0))) + assert.ok( + validatorResult( + validators.either( + validators.bool, + validators.hex, + validators.array(validators.hex) + )([['0xaaa']], 0) ) ) - ) - t.ok( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['INVALID'], 0)) - ) - t.ok(validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([''], 0))) - t.ok(validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([], 0))) - - // invalid - t.notOk(validatorResult(validators.optional(validators.bool)(['hey'], 0))) - t.notOk(validatorResult(validators.optional(validators.blockHash)(['0x0'], 0))) - t.notOk( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['ANOTHER'], 0)) - ) - - t.end() -}) - -tape(`${prefix} either`, (t) => { - // valid - t.ok(validatorResult(validators.either(validators.bool, validators.blockHash)([true], 0))) - t.ok(validatorResult(validators.either(validators.bool, validators.hex)(['0xaaa'], 0))) - t.ok( - 
validatorResult( - validators.either( - validators.bool, - validators.hex, - validators.array(validators.hex) - )([['0xaaa']], 0) - ) - ) - t.ok( - validatorResult( - validators.either(validators.bool, validators.blockHash)( - ['0x0000000000000000000000000000000000000000000000000000000000000000'], - 0 + assert.ok( + validatorResult( + validators.either(validators.bool, validators.blockHash)( + ['0x0000000000000000000000000000000000000000000000000000000000000000'], + 0 + ) ) ) - ) - // invalid - t.notOk(validatorResult(validators.either(validators.bool, validators.blockHash)(['0xabc'], 0))) - t.notOk(validatorResult(validators.either(validators.bool, validators.hex)(['abc'], 0))) - t.notOk(validatorResult(validators.either(validators.hex, validators.blockHash)([true], 0))) - t.notOk( - validatorResult( - validators.either( - validators.hex, - validators.blockHash, - validators.array(validators.hex) - )([[false]], 0) + // invalid + assert.notOk( + validatorResult(validators.either(validators.bool, validators.blockHash)(['0xabc'], 0)) ) - ) - - t.end() + assert.notOk(validatorResult(validators.either(validators.bool, validators.hex)(['abc'], 0))) + assert.notOk( + validatorResult(validators.either(validators.hex, validators.blockHash)([true], 0)) + ) + assert.notOk( + validatorResult( + validators.either( + validators.hex, + validators.blockHash, + validators.array(validators.hex) + )([[false]], 0) + ) + ) + }) }) diff --git a/packages/client/test/rpc/web3/clientVersion.spec.ts b/packages/client/test/rpc/web3/clientVersion.spec.ts index e977219393..0e7f76fd0b 100644 --- a/packages/client/test/rpc/web3/clientVersion.spec.ts +++ b/packages/client/test/rpc/web3/clientVersion.spec.ts @@ -1,34 +1,36 @@ import { platform } from 'os' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, baseSetup, params } from '../helpers' const method = 'web3_clientVersion' -tape(`${method}: call`, async (t) => { - const { server } = baseSetup() 
+describe(method, () => { + it('call', async () => { + const { server } = baseSetup() - const req = params(method, []) - const expectRes = (res: any) => { - const { result } = res.body - const { version } = require('../../../package.json') - const expectedClientTitle = 'EthereumJS' - const expectedPackageVersion = version - const expectedPlatform = platform() - const expectedNodeVersion = `node${process.version.substring(1)}` + const req = params(method, []) + const expectRes = (res: any) => { + const { result } = res.body + const { version } = require('../../../package.json') + const expectedClientTitle = 'EthereumJS' + const expectedPackageVersion = version + const expectedPlatform = platform() + const expectedNodeVersion = `node${process.version.substring(1)}` - let msg = 'result string should not be empty' - t.notEqual(result.length, 0, msg) - const [actualClientTitle, actualPackageVersion, actualPlatform, actualNodeVersion] = - result.split('/') - msg = 'client title should be correct' - t.equal(actualClientTitle, expectedClientTitle, msg) - msg = 'package version should be correct' - t.equal(actualPackageVersion, expectedPackageVersion, msg) - msg = 'platform should be correct' - t.equal(actualPlatform, expectedPlatform, msg) - msg = 'Node.js version should be correct' - t.equal(actualNodeVersion, expectedNodeVersion, msg) - } - await baseRequest(t, server, req, 200, expectRes) + let msg = 'result string should not be empty' + assert.notEqual(result.length, 0, msg) + const [actualClientTitle, actualPackageVersion, actualPlatform, actualNodeVersion] = + result.split('/') + msg = 'client title should be correct' + assert.equal(actualClientTitle, expectedClientTitle, msg) + msg = 'package version should be correct' + assert.equal(actualPackageVersion, expectedPackageVersion, msg) + msg = 'platform should be correct' + assert.equal(actualPlatform, expectedPlatform, msg) + msg = 'Node.js version should be correct' + assert.equal(actualNodeVersion, 
expectedNodeVersion, msg) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/web3/sha3.spec.ts b/packages/client/test/rpc/web3/sha3.spec.ts index 6273aebeca..45b41902bd 100644 --- a/packages/client/test/rpc/web3/sha3.spec.ts +++ b/packages/client/test/rpc/web3/sha3.spec.ts @@ -1,60 +1,66 @@ -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { baseRequest, baseSetup, params } from '../helpers' const method = 'web3_sha3' -function compareErrorCode(t: any, error: any, errorCode: any) { +function compareErrorCode(error: any, errorCode: any) { const msg = `should return the correct error code (expected: ${errorCode}, received: ${error.code})` - t.equal(error.code, errorCode, msg) + assert.equal(error.code, errorCode, msg) } -function compareErrorMsg(t: any, error: any, errorMsg: any) { +function compareErrorMsg(error: any, errorMsg: any) { const msg = `should return "${errorMsg}" error message` - t.equal(error.message, errorMsg, msg) + assert.equal(error.message, errorMsg, msg) } -tape(`${method}: call with one valid parameter`, async (t) => { - const { server } = baseSetup() +describe(method, () => { + it('call with one valid parameter', async () => { + const { server } = baseSetup() - const req = params(method, ['0x68656c6c6f20776f726c64']) - const expectRes = (res: any) => { - const { result } = res.body - let msg = 'result string should not be empty' - t.notEqual(result.length, 0, msg) + const req = params(method, ['0x68656c6c6f20776f726c64']) + const expectRes = (res: any) => { + const { result } = res.body + let msg = 'result string should not be empty' + assert.notEqual(result.length, 0, msg) - msg = 'should return the correct hash value' - t.equal(result, '0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad', msg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + msg = 'should return the correct hash value' + assert.equal( + result, + 
'0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad', + msg + ) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with one non-hex parameter`, async (t) => { - const { server } = baseSetup() + it('call with one non-hex parameter', async () => { + const { server } = baseSetup() - const req = params(method, ['hello world']) - const expectRes = (res: any) => { - const { error } = res.body + const req = params(method, ['hello world']) + const expectRes = (res: any) => { + const { error } = res.body - compareErrorCode(t, error, -32602) + compareErrorCode(error, -32602) - const errorMsg = 'invalid argument 0: hex string without 0x prefix' - compareErrorMsg(t, error, errorMsg) - } - await baseRequest(t, server, req, 200, expectRes) -}) + const errorMsg = 'invalid argument 0: hex string without 0x prefix' + compareErrorMsg(error, errorMsg) + } + await baseRequest(server, req, 200, expectRes) + }) -tape(`${method}: call with no parameters`, async (t) => { - const { server } = baseSetup() + it('call with no parameters', async () => { + const { server } = baseSetup() - const req = params(method, []) - const expectRes = (res: any) => { - const { error } = res.body + const req = params(method, []) + const expectRes = (res: any) => { + const { error } = res.body - compareErrorCode(t, error, -32602) + compareErrorCode(error, -32602) - const errorMsg = 'missing value for required argument 0' - compareErrorMsg(t, error, errorMsg) - } - await baseRequest(t, server, req, 200, expectRes) + const errorMsg = 'missing value for required argument 0' + compareErrorMsg(error, errorMsg) + } + await baseRequest(server, req, 200, expectRes) + }) }) diff --git a/packages/client/test/rpc/websocket.spec.ts b/packages/client/test/rpc/websocket.spec.ts index a88fe61f45..293fe1897c 100644 --- a/packages/client/test/rpc/websocket.spec.ts +++ b/packages/client/test/rpc/websocket.spec.ts @@ -1,6 +1,6 @@ import { randomBytes } from '@ethereumjs/util' import 
{ encode } from 'jwt-simple' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { METHOD_NOT_FOUND } from '../../src/rpc/error-code' @@ -11,76 +11,77 @@ import type { TAlgorithm } from 'jwt-simple' const request = require('superwstest') const jwtSecret = randomBytes(32) -const wsPort = 3000 -tape('call JSON-RPC auth protected server with valid token', (t) => { - const server = startRPC({}, { wsServer: true }, { jwtSecret }) - const claims = { iat: Math.floor(new Date().getTime() / 1000) } - const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) - const req = { - jsonrpc: '2.0', - method: 'METHOD_DOES_NOT_EXIST', - params: ['0x1', true], - id: 1, - } +describe('JSON-RPC call', () => { + it('auth protected server with valid token', () => { + const server = startRPC({}, { wsServer: true }, { jwtSecret }) + const claims = { iat: Math.floor(new Date().getTime() / 1000) } + const token = encode(claims, jwtSecret as never as string, 'HS256' as TAlgorithm) + const req = { + jsonrpc: '2.0', + method: 'METHOD_DOES_NOT_EXIST', + params: ['0x1', true], + id: 1, + } - const testFn = async () => { - try { - await request(server) - .ws('/') - .set('Authorization', `Bearer ${token}`) - .sendJson(req) - .expectJson((res: any) => res.error.code === METHOD_NOT_FOUND) - .close() - .expectClosed() - t.end() - } catch (err) { - t.end(err) - } finally { - closeRPC(server) + const testFn = async () => { + try { + await request(server) + .ws('/') + .set('Authorization', `Bearer ${token}`) + .sendJson(req) + .expectJson((res: any) => res.error.code === METHOD_NOT_FOUND) + .close() + .expectClosed() + assert.ok(true) + } catch (err) { + assert.notOk(err) + } finally { + closeRPC(server) + } } - } - server.listen(wsPort, 'localhost', testFn) -}) + server.listen(0, 'localhost', testFn) + }) -tape('call JSON-RPC auth protected server without any auth headers', (t) => { - const server = startRPC({}, { wsServer: true }, { jwtSecret }) - 
const testFn = async () => { - try { - await request(server).ws('/').expectConnectionError(401) - t.end() - } catch (err) { - t.end(err) - } finally { - closeRPC(server) + it('auth protected server without any auth headers', () => { + const server = startRPC({}, { wsServer: true }, { jwtSecret }) + const testFn = async () => { + try { + await request(server).ws('/').expectConnectionError(401) + assert.ok(true) + } catch (err) { + assert.notOk(err) + } finally { + closeRPC(server) + } } - } - server.listen(wsPort, 'localhost', testFn) -}) + server.listen(0, 'localhost', testFn) + }) -tape('call JSON-RPC server without any auth headers', (t) => { - const server = startRPC({}, { wsServer: true }) - const req = { - jsonrpc: '2.0', - method: 'METHOD_DOES_NOT_EXIST', - params: ['0x1', true], - id: 1, - } + it('server without any auth headers', () => { + const server = startRPC({}, { wsServer: true }) + const req = { + jsonrpc: '2.0', + method: 'METHOD_DOES_NOT_EXIST', + params: ['0x1', true], + id: 1, + } - const testFn = async () => { - try { - await request(server) - .ws('/') - .sendJson(req) - .expectJson((res: any) => res.error.code === METHOD_NOT_FOUND) - .close() - .expectClosed() - t.end() - } catch (err) { - t.end(err) - } finally { - closeRPC(server) + const testFn = async () => { + try { + await request(server) + .ws('/') + .sendJson(req) + .expectJson((res: any) => res.error.code === METHOD_NOT_FOUND) + .close() + .expectClosed() + assert.ok(true) + } catch (err) { + assert.notOk(err) + } finally { + closeRPC(server) + } } - } - server.listen(wsPort, 'localhost', testFn) + server.listen(0, 'localhost', testFn) + }) }) diff --git a/packages/client/test/service/fullethereumservice.spec.ts b/packages/client/test/service/fullethereumservice.spec.ts index 3adc2c811d..aa3d3d09f7 100644 --- a/packages/client/test/service/fullethereumservice.spec.ts +++ b/packages/client/test/service/fullethereumservice.spec.ts @@ -1,136 +1,133 @@ import { Common, Hardfork } from 
'@ethereumjs/common' import { TransactionFactory, TransactionType } from '@ethereumjs/tx' import { equalsBytes, hexToBytes, randomBytes } from '@ethereumjs/util' -import * as tape from 'tape' -import * as td from 'testdouble' +import { assert, describe, expect, it, vi } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' +import { RlpxServer } from '../../src/net/server' import { Event } from '../../src/types' -import * as genesisJSON from '../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../testdata/geth-genesis/post-merge.json' +import type { BeaconSynchronizer } from '../../src/sync' import type { Log } from '@ethereumjs/evm' -tape('[FullEthereumService]', async (t) => { - class PeerPool { - open() {} - close() {} - start() {} - stop() {} - } - PeerPool.prototype.open = td.func() - PeerPool.prototype.close = td.func() - PeerPool.prototype.start = td.func() - PeerPool.prototype.stop = td.func() - td.replace('../../src/net/peerpool', { PeerPool }) - const MockChain = td.constructor([] as any) - MockChain.prototype.open = td.func() - td.replace('../../src/blockchain', { Chain: MockChain }) - const EthProtocol = td.constructor([] as any) - const LesProtocol = td.constructor([] as any) - td.replace('../../src/net/protocol/ethprotocol', { EthProtocol }) - td.replace('../../src/net/protocol/lesprotocol', { LesProtocol }) - class FullSynchronizer { - start() {} - stop() {} - open() {} - close() {} - handleNewBlock() {} - handleNewBlockHashes() {} - } - FullSynchronizer.prototype.start = td.func() - FullSynchronizer.prototype.stop = td.func() - FullSynchronizer.prototype.open = td.func() - FullSynchronizer.prototype.close = td.func() - FullSynchronizer.prototype.handleNewBlock = td.func() - FullSynchronizer.prototype.handleNewBlockHashes = td.func() - class BeaconSynchronizer { - start() {} - stop() {} - open() {} - close() {} - } - BeaconSynchronizer.prototype.start = td.func() - 
BeaconSynchronizer.prototype.stop = td.func() - BeaconSynchronizer.prototype.open = td.func() - BeaconSynchronizer.prototype.close = td.func() - td.replace('../../src/sync', { FullSynchronizer, BeaconSynchronizer }) - - class Block { - static fromValuesArray() { - return {} +describe('[FullEthereumService]', async () => { + vi.mock('../../src/net/peerpool', () => { + const PeerPool = vi.fn() + PeerPool.prototype.open = vi.fn() + PeerPool.prototype.close = vi.fn() + PeerPool.prototype.start = vi.fn() + PeerPool.prototype.stop = vi.fn() + return { PeerPool } + }) + + vi.mock('../../src/net/protocol/ethprotocol', () => { + const EthProtocol = vi.fn() + EthProtocol.prototype.name = 'eth' + return { EthProtocol } + }) + + vi.mock('../../src/net/protocol/lesprotocol', () => { + const LesProtocol = vi.fn() + LesProtocol.prototype.name = 'les' + return { LesProtocol } + }) + + vi.mock('../../src/sync/fullsync', () => { + const FullSynchronizer = vi.fn() + FullSynchronizer.prototype.start = vi.fn() + FullSynchronizer.prototype.stop = vi.fn() + FullSynchronizer.prototype.open = vi.fn() + FullSynchronizer.prototype.close = vi.fn() + FullSynchronizer.prototype.handleNewBlock = vi.fn() + FullSynchronizer.prototype.handleNewBlockHashes = vi.fn() + FullSynchronizer.prototype.type = 'full' + + return { FullSynchronizer } + }) + vi.mock('../../src/sync/beaconsync', () => { + const BeaconSynchronizer = vi.fn() + BeaconSynchronizer.prototype.start = vi.fn() + BeaconSynchronizer.prototype.stop = vi.fn() + BeaconSynchronizer.prototype.open = vi.fn() + BeaconSynchronizer.prototype.close = vi.fn() + BeaconSynchronizer.prototype.type = 'beacon' + return { + BeaconSynchronizer, } - } - td.replace('@ethereumjs/block', { Block }) + }) + + vi.mock('@ethereumjs/block') + vi.mock('../../src/net/server') + vi.mock('../../src/execution') const { FullEthereumService } = await import('../../src/service/fullethereumservice') - t.test('should initialize correctly', async (t) => { + it('should 
initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new FullEthereumService({ config, chain }) - t.ok(service.synchronizer instanceof FullSynchronizer, 'full mode') - t.equals(service.name, 'eth', 'got name') - t.end() + + assert.equal('full', service.synchronizer?.type, 'full mode') + assert.equal(service.name, 'eth', 'got name') }) - t.test('should get protocols', async (t) => { + it('should get protocols', async () => { let config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) let service = new FullEthereumService({ config, chain }) - t.ok(service.protocols.filter((p) => p instanceof EthProtocol).length > 0, 'full protocol') - t.notOk( - service.protocols.filter((p) => p instanceof LesProtocol).length > 0, - 'no light protocol' - ) + assert.ok(service.protocols.filter((p) => p.name === 'eth').length > 0, 'full protocol') + assert.notOk(service.protocols.filter((p) => p.name === 'les').length > 0, 'no light protocol') config = new Config({ transports: [], lightserv: true }) service = new FullEthereumService({ config, chain }) - t.ok(service.protocols.filter((p) => p instanceof EthProtocol).length > 0, 'full protocol') - t.ok( - service.protocols.filter((p) => p instanceof LesProtocol).length > 0, - 'lightserv protocols' - ) - t.end() + assert.ok(service.protocols.filter((p) => p.name === 'eth').length > 0, 'full protocol') + assert.ok(service.protocols.filter((p) => p.name === 'les').length > 0, 'lightserv protocols') }) - t.test('should open', async (t) => { - t.plan(3) - const server = td.object() as any + it('should open', async () => { + const server = new RlpxServer({} as any) const config = new Config({ servers: [server], accountCache: 10000, storageCache: 1000 }) + const chain = await Chain.create({ config }) + chain.open = vi.fn() const service = new 
FullEthereumService({ config, chain }) + await service.open() - td.verify(service.synchronizer!.open()) - td.verify(server.addProtocols(td.matchers.anything())) - service.config.events.on(Event.SYNC_SYNCHRONIZED, () => t.pass('synchronized')) + expect(service.synchronizer!.open).toBeCalled() + expect(server.addProtocols).toBeCalled() + service.config.events.on(Event.SYNC_SYNCHRONIZED, () => assert.ok(true, 'synchronized')) service.config.events.on(Event.SYNC_ERROR, (err) => { - if (err.message === 'error0') t.pass('got error 1') + if (err.message === 'error0') assert.ok(true, 'got error 1') }) service.config.events.emit(Event.SYNC_SYNCHRONIZED, BigInt(0)) service.config.events.emit(Event.SYNC_ERROR, new Error('error0')) service.config.events.on(Event.SERVER_ERROR, (err) => { - if (err.message === 'error1') t.pass('got error 2') + if (err.message === 'error1') assert.ok(true, 'got error 2') }) service.config.events.emit(Event.SERVER_ERROR, new Error('error1'), server) await service.close() }) - t.test('should start/stop', async (t) => { - const server = td.object() as any + it('should start/stop', async () => { + const server = new RlpxServer({} as any) const config = new Config({ servers: [server], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) + const service = new FullEthereumService({ config, chain }) await service.start() - td.verify(service.synchronizer!.start()) - t.notOk(await service.start(), 'already started') + + expect(service.synchronizer!.start).toBeCalled() + assert.notOk(await service.start(), 'already started') await service.stop() - td.verify(service.synchronizer!.stop()) - t.notOk(await service.stop(), 'already stopped') - t.end() + expect(service.synchronizer!.stop).toBeCalled() + assert.notOk(await service.stop(), 'already stopped') }) - t.test('should correctly handle GetBlockHeaders', async (t) => { + it('should correctly handle GetBlockHeaders', async () => { const config = new Config({ transports: [], 
accountCache: 10000, storageCache: 1000 }) + vi.unmock('../../src/blockchain') + await import('../../src/blockchain') const chain = await Chain.create({ config }) chain.getHeaders = () => [{ number: 1n }] as any const service = new FullEthereumService({ config, chain }) @@ -143,7 +140,7 @@ tape('[FullEthereumService]', async (t) => { { eth: { send: (title: string, msg: any) => { - t.ok( + assert.ok( title === 'BlockHeaders' && msg.headers.length === 0, 'sent empty headers when block height is too high' ) @@ -166,67 +163,53 @@ tape('[FullEthereumService]', async (t) => { { eth: { send: (title: string, msg: any) => { - t.ok( + assert.ok( title === 'BlockHeaders' && msg.headers.length === 1, 'sent 1 header when requested' ) - t.end() }, } as any, } as any ) }) - t.test( - 'should call handleNewBlock on NewBlock and handleNewBlockHashes on NewBlockHashes', - async (t) => { - const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) - const chain = await Chain.create({ config }) - const service = new FullEthereumService({ config, chain }) - await service.handle({ name: 'NewBlock', data: [{}, BigInt(1)] }, 'eth', undefined as any) - td.verify((service.synchronizer as any).handleNewBlock({}, undefined)) - await service.handle( - { name: 'NewBlockHashes', data: [{}, BigInt(1)] }, - 'eth', - undefined as any - ) - td.verify((service.synchronizer as any).handleNewBlockHashes([{}, BigInt(1)])) - // should not call when using BeaconSynchronizer - // (would error if called since handleNewBlock and handleNewBlockHashes are not available on BeaconSynchronizer) - await service.switchToBeaconSync() - t.ok(service.synchronizer instanceof BeaconSynchronizer, 'switched to BeaconSynchronizer') - t.ok(service.beaconSync, 'can access BeaconSynchronizer') - await service.handle({ name: 'NewBlock', data: [{}, BigInt(1)] }, 'eth', undefined as any) - await service.handle( - { name: 'NewBlockHashes', data: [{}, BigInt(1)] }, - 'eth', - undefined as any - ) - 
t.end() - } - ) + it('should call handleNewBlock on NewBlock and handleNewBlockHashes on NewBlockHashes', async () => { + const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) + const chain = await Chain.create({ config }) + const service = new FullEthereumService({ config, chain }) + await service.handle({ name: 'NewBlock', data: [{}, BigInt(1)] }, 'eth', undefined as any) + expect((service.synchronizer as any).handleNewBlock).toBeCalled() + await service.handle({ name: 'NewBlockHashes', data: [{}, BigInt(1)] }, 'eth', undefined as any) + expect((service.synchronizer as any).handleNewBlockHashes).toBeCalledWith([{}, BigInt(1)]) + // should not call when using BeaconSynchronizer + // (would error if called since handleNewBlock and handleNewBlockHashes are not available on BeaconSynchronizer) + await service.switchToBeaconSync() + assert.ok( + (service.synchronizer as BeaconSynchronizer).type === 'beacon', + 'switched to BeaconSynchronizer' + ) + assert.ok(service.beaconSync, 'can access BeaconSynchronizer') + await service.handle({ name: 'NewBlock', data: [{}, BigInt(1)] }, 'eth', undefined as any) + await service.handle({ name: 'NewBlockHashes', data: [{}, BigInt(1)] }, 'eth', undefined as any) + }) - t.test('should ban peer for sending NewBlock/NewBlockHashes after merge', async (t) => { - t.plan(2) + it('should ban peer for sending NewBlock/NewBlockHashes after merge', async () => { const common = new Common({ chain: 'mainnet', hardfork: Hardfork.Paris }) const config = new Config({ common, transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new FullEthereumService({ config, chain }) service.pool.ban = () => { - t.pass('banned peer when NewBlock/NewBlockHashes announced after Merge') + assert.ok(true, 'banned peer when NewBlock/NewBlockHashes announced after Merge') } await service.handle({ name: 'NewBlock', data: [{}, BigInt(1)] }, 'eth', { id: 1 } as any) await 
service.handle({ name: 'NewBlockHashes', data: [] }, 'eth', { id: 1 } as any) }) - t.test('should send Receipts on GetReceipts', async (t) => { + it('should send Receipts on GetReceipts', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new FullEthereumService({ config, chain }) - service.execution = { - receiptsManager: { getReceipts: td.func() }, - } as any const blockHash = new Uint8Array(32).fill(1) const receipts = [ { @@ -256,26 +239,25 @@ tape('[FullEthereumService]', async (t) => { txType: TransactionType.Legacy, }, ] - td.when(service.execution.receiptsManager!.getReceipts(blockHash, true, true)).thenResolve( - receipts - ) - const peer = { eth: { send: td.func() } } as any + service.execution = { + receiptsManager: { getReceipts: vi.fn().mockReturnValue(receipts) }, + } as any + + const peer = { eth: { send: vi.fn() } } as any await service.handle({ name: 'GetReceipts', data: [BigInt(1), [blockHash]] }, 'eth', peer) - td.verify(peer.eth.send('Receipts', { reqId: BigInt(1), receipts })) - t.end() + expect(peer.eth.send).toBeCalledWith('Receipts', { reqId: BigInt(1), receipts }) }) - t.test('should handle Transactions', async (st) => { + it('should handle Transactions', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new FullEthereumService({ config, chain }) service.txPool.handleAnnouncedTxs = async (msg, _peer, _pool) => { - st.deepEqual( + assert.deepEqual( msg[0], TransactionFactory.fromTxData({ type: 2 }), 'handled Transactions message' ) - st.end() } await service.handle( @@ -288,13 +270,12 @@ tape('[FullEthereumService]', async (t) => { ) }) - t.test('should handle NewPooledTransactionHashes', async (st) => { + it('should handle NewPooledTransactionHashes', async () => { const config = new Config({ transports: [], accountCache: 
10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new FullEthereumService({ config, chain }) service.txPool.handleAnnouncedTxHashes = async (msg, _peer, _pool) => { - st.deepEqual(msg[0], hexToBytes('0xabcd'), 'handled NewPooledTransactionhashes') - st.end() + assert.deepEqual(msg[0], hexToBytes('0xabcd'), 'handled NewPooledTransactionhashes') } await service.handle( @@ -311,7 +292,7 @@ tape('[FullEthereumService]', async (t) => { ) }) - t.test('should handle GetPooledTransactions', async (st) => { + it('should handle GetPooledTransactions', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new FullEthereumService({ config, chain }) @@ -326,85 +307,71 @@ tape('[FullEthereumService]', async (t) => { { eth: { send: (_: string, data: any): any => { - st.ok(equalsBytes(data.txs[0].hash(), tx.hash()), 'handled getPooledTransactions') - st.end() + assert.ok(equalsBytes(data.txs[0].hash(), tx.hash()), 'handled getPooledTransactions') }, } as any, } as any ) }) - t.test( - 'should handle decoding NewPooledTransactionHashes with eth/68 message format', - async (st) => { - const txHash = randomBytes(32) - - const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) - const chain = await Chain.create({ config }) - const service = new FullEthereumService({ config, chain }) - ;(service.txPool as any).validate = () => {} - ;(service.txPool as any).handleAnnouncedTxHashes = ( - hashes: Uint8Array[], - _peer: any, - _pool: any - ) => { - st.deepEqual(hashes[0], txHash, 'should get correct tx hash from eth68 message') - st.end() - } - - await service.handle( - { name: 'NewPooledTransactionHashes', data: [[1], [100], [txHash]] }, - 'eth', + it('should handle decoding NewPooledTransactionHashes with eth/68 message format', async () => { + const txHash = randomBytes(32) + + const config = new Config({ 
transports: [], accountCache: 10000, storageCache: 1000 }) + const chain = await Chain.create({ config }) + const service = new FullEthereumService({ config, chain }) + ;(service.txPool as any).validate = () => {} + ;(service.txPool as any).handleAnnouncedTxHashes = ( + hashes: Uint8Array[], + _peer: any, + _pool: any + ) => { + assert.deepEqual(hashes[0], txHash, 'should get correct tx hash from eth68 message') + } + + await service.handle( + { name: 'NewPooledTransactionHashes', data: [[1], [100], [txHash]] }, + 'eth', + { + eth: { + versions: [67, 68], + }, + } as any + ) + }) + + it('should handle structuring NewPooledTransactionHashes with eth/68 message format', async () => { + const txHash = randomBytes(32) + + const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) + const chain = await Chain.create({ config }) + const service = new FullEthereumService({ config, chain }) + ;(service.txPool as any).validate = () => {} + + await service.txPool.sendNewTxHashes( + [[1], [100], [txHash]], + [ { eth: { versions: [67, 68], - }, - } as any - ) - } - ) - - t.test( - 'should handle structuring NewPooledTransactionHashes with eth/68 message format', - async (st) => { - const txHash = randomBytes(32) - - const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) - const chain = await Chain.create({ config }) - const service = new FullEthereumService({ config, chain }) - ;(service.txPool as any).validate = () => {} - - await service.txPool.sendNewTxHashes( - [[1], [100], [txHash]], - [ - { - eth: { - versions: [67, 68], - request: (_: string, data: any): any => { - st.ok(equalsBytes(data[0][2], txHash), 'handled getPooledTransactions') - st.end() - }, + request: (_: string, data: any): any => { + assert.ok(equalsBytes(data[0][2], txHash), 'handled getPooledTransactions') }, - } as any, - ] - ) - } - ) + }, + } as any, + ] + ) + }) - t.test('should start on beacon sync when past merge', async (t) => { + 
it('should start on beacon sync when past merge', async () => { const common = Common.fromGethGenesis(genesisJSON, { chain: 'post-merge' }) common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000, common }) const chain = await Chain.create({ config }) let service = new FullEthereumService({ config, chain }) - t.ok(service.beaconSync, 'beacon sync should be available') + assert.ok(service.beaconSync, 'beacon sync should be available') const configDisableBeaconSync = new Config({ transports: [], common, disableBeaconSync: true }) service = new FullEthereumService({ config: configDisableBeaconSync, chain }) - t.notOk(service.beaconSync, 'beacon sync should not be available') - }) - - t.test('should reset td', (t) => { - td.reset() - t.end() + assert.notOk(service.beaconSync, 'beacon sync should not be available') }) }) diff --git a/packages/client/test/service/lightethereumservice.spec.ts b/packages/client/test/service/lightethereumservice.spec.ts index 4c22394719..4d86bd5e85 100644 --- a/packages/client/test/service/lightethereumservice.spec.ts +++ b/packages/client/test/service/lightethereumservice.spec.ts @@ -1,92 +1,81 @@ -import * as tape from 'tape' -import * as td from 'testdouble' +import { assert, describe, expect, it, vi } from 'vitest' -import { Chain } from '../../src/blockchain' +import { Chain } from '../../src/blockchain/chain' import { Config } from '../../src/config' +import { LesProtocol } from '../../src/net/protocol' +import { RlpxServer } from '../../src/net/server' +import { LightSynchronizer } from '../../src/sync/lightsync' import { Event } from '../../src/types' +describe('[LightEthereumService]', async () => { + vi.mock('../../src/net/peerpool') -tape('[LightEthereumService]', async (t) => { - class PeerPool { - open() {} - close() {} - } - PeerPool.prototype.open = td.func() - PeerPool.prototype.close = td.func() - td.replace('../../src/net/peerpool', 
{ PeerPool }) - const MockChain = td.constructor([] as any) - MockChain.prototype.open = td.func() - td.replace('../../src/blockchain', { MockChain }) - const LesProtocol = td.constructor([] as any) - td.replace('../../src/net/protocol/lesprotocol', { LesProtocol }) - class LightSynchronizer { - start() {} - stop() {} - open() {} - close() {} - } - LightSynchronizer.prototype.start = td.func() - LightSynchronizer.prototype.stop = td.func() - LightSynchronizer.prototype.open = td.func() - LightSynchronizer.prototype.close = td.func() - td.replace('../../src/sync/lightsync', { LightSynchronizer }) + vi.mock('../../src/net/server') + vi.mock('../../src/blockchain', () => { + const Chain = vi.fn() + Chain.prototype.open = vi.fn() + return { Chain } + }) + vi.mock('../../src/net/protocol/lesprotocol', () => { + const LesProtocol = vi.fn() + return { LesProtocol } + }) + vi.mock('../../src/sync/lightsync', () => { + const LightSynchronizer = vi.fn() + LightSynchronizer.prototype.start = vi.fn() + LightSynchronizer.prototype.stop = vi.fn() + LightSynchronizer.prototype.open = vi.fn() + LightSynchronizer.prototype.close = vi.fn() + return { LightSynchronizer } + }) const { LightEthereumService } = await import('../../src/service/lightethereumservice') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new LightEthereumService({ config, chain }) - t.ok(service.synchronizer instanceof LightSynchronizer, 'light sync') - t.equals(service.name, 'eth', 'got name') - t.end() + assert.ok(service.synchronizer instanceof LightSynchronizer, 'light sync') + assert.equal(service.name, 'eth', 'got name') }) - t.test('should get protocols', async (t) => { + it('should get protocols', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const 
chain = await Chain.create({ config }) const service = new LightEthereumService({ config, chain }) - t.ok(service.protocols[0] instanceof LesProtocol, 'light protocols') - t.end() + assert.ok(service.protocols[0] instanceof LesProtocol, 'light protocols') }) - t.test('should open', async (t) => { - t.plan(3) - const server = td.object() as any + it('should open', async () => { + const server = new RlpxServer({} as any) const config = new Config({ servers: [server], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new LightEthereumService({ config, chain }) await service.open() - td.verify(service.synchronizer.open()) - td.verify(server.addProtocols(td.matchers.anything())) - service.config.events.on(Event.SYNC_SYNCHRONIZED, () => t.pass('synchronized')) + expect(service.synchronizer.open).toHaveBeenCalled() + expect(server.addProtocols).toBeCalled() + service.config.events.on(Event.SYNC_SYNCHRONIZED, () => assert.ok(true, 'synchronized')) service.config.events.on(Event.SYNC_ERROR, (err: Error) => { - if (err.message === 'error0') t.pass('got error 1') + if (err.message === 'error0') assert.ok(true, 'got error 1') }) service.config.events.emit(Event.SYNC_SYNCHRONIZED, BigInt(0)) service.config.events.emit(Event.SYNC_ERROR, new Error('error0')) service.config.events.on(Event.SERVER_ERROR, (err: Error) => { - if (err.message === 'error1') t.pass('got error 2') + if (err.message === 'error1') assert.ok(true, 'got error 2') }) service.config.events.emit(Event.SERVER_ERROR, new Error('error1'), server) await service.close() }) - t.test('should start/stop', async (t) => { - const server = td.object() as any + it('should start/stop', async () => { + const server = new RlpxServer({} as any) const config = new Config({ servers: [server], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const service = new LightEthereumService({ config, chain }) await service.start() - 
td.verify(service.synchronizer.start()) - t.notOk(await service.start(), 'already started') + expect(service.synchronizer.start).toBeCalled() + assert.notOk(await service.start(), 'already started') await service.stop() - td.verify(service.synchronizer.stop()) - t.notOk(await service.stop(), 'already stopped') - t.end() - }) - - t.test('should reset td', (t) => { - td.reset() - t.end() + expect(service.synchronizer.stop).toBeCalled() + assert.notOk(await service.stop(), 'already stopped') }) }) diff --git a/packages/client/test/sim/4844devnet5.spec.ts b/packages/client/test/sim/4844devnet5.spec.ts index 766403c82d..d6da94fe1e 100644 --- a/packages/client/test/sim/4844devnet5.spec.ts +++ b/packages/client/test/sim/4844devnet5.spec.ts @@ -2,7 +2,7 @@ import { Common } from '@ethereumjs/common' import { bytesToHex, hexToBytes, privateToAddress } from '@ethereumjs/util' import { Client } from 'jayson/promise' import { randomBytes } from 'node:crypto' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { createBlobTxs, @@ -42,31 +42,31 @@ export async function runTx(data: string, to?: string, value?: bigint) { return runTxHelper({ client, common, sender, pkey }, data, to, value) } -tape(`running txes on ${rpcUrl}`, async (t) => { +describe(`running txes on ${rpcUrl}`, async () => { const { teardownCallBack, result } = await startNetwork(network, client, { filterKeywords, filterOutWords, externalRun: 'true', }) - t.pass(`connected to client ${result}`) + assert.ok(true, `connected to client ${result}`) console.log(`Checking for network running...`) try { await waitForELStart(client) - t.pass(`${result} confirmed running`) + assert.ok(true, `${result} confirmed running`) } catch (e) { - t.fail(`failed to confirm ${result} running`) + assert.fail(`failed to confirm ${result} running`) throw e } - t.test('run blob transactions', async (st) => { + it('run blob transactions', async () => { const nonceFetch = await client.request( 
'eth_getTransactionCount', [sender.toString(), 'latest'], 2.0 ) const nonce = Number(nonceFetch.result) - st.pass(`fetched ${sender}'s nonce=${nonce} for blob txs`) + assert.ok(true, `fetched ${sender}'s nonce=${nonce} for blob txs`) const txns = await createBlobTxs( numTxs - 1, @@ -88,24 +88,21 @@ tape(`running txes on ${rpcUrl}`, async (t) => { const res = await client.request('eth_sendRawTransaction', [txn], 2.0) if (res.result === undefined) { console.log('eth_sendRawTransaction returned invalid response', res) - st.fail(`Unable to post all txs`) + assert.fail(`Unable to post all txs`) break } - st.pass(`posted tx with hash=${res.result}`) + assert.ok(true, `posted tx with hash=${res.result}`) txHashes.push(res.result) } - st.pass(`posted txs=${txHashes.length}`) + assert.ok(true, `posted txs=${txHashes.length}`) }) - t.test('cleanup', async (st) => { + it('cleanup', async () => { try { await teardownCallBack() - st.pass('script terminated') + assert.ok(true, 'script terminated') } catch (e) { - st.fail('could not terminate properly') + assert.fail('could not terminate properly') } - st.end() }) - - t.end() }) diff --git a/packages/client/test/sim/eof.spec.ts b/packages/client/test/sim/eof.spec.ts index a676608de4..5d2b5f1086 100644 --- a/packages/client/test/sim/eof.spec.ts +++ b/packages/client/test/sim/eof.spec.ts @@ -1,7 +1,7 @@ import { Common } from '@ethereumjs/common' import { bytesToHex, hexToBytes, privateToAddress } from '@ethereumjs/util' import { Client } from 'jayson/promise' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { filterKeywords, @@ -23,7 +23,7 @@ export async function runTx(data: string, to?: string, value?: bigint) { return runTxHelper({ client, common, sender, pkey }, data, to, value) } -tape('EOF ephemeral hardfork tests', async (t) => { +describe('EOF ephemeral hardfork tests', async () => { const { teardownCallBack, result } = await startNetwork(network, client, { filterKeywords, filterOutWords, 
@@ -31,79 +31,78 @@ tape('EOF ephemeral hardfork tests', async (t) => { }) if (result.includes('EthereumJS')) { - t.pass('connected to client') + assert.ok(true, 'connected to client') } else { - t.fail('connected to wrong client') + assert.fail('connected to wrong client') } console.log(`Waiting for network to start...`) try { await waitForELStart(client) - t.pass('ethereumjs<>lodestar started successfully') + assert.ok(true, 'ethereumjs<>lodestar started successfully') } catch (e) { - t.fail('ethereumjs<>lodestar failed to start') + assert.fail('ethereumjs<>lodestar failed to start') throw e } // ------------Sanity checks-------------------------------- - t.test('Simple transfer - sanity check', async (st) => { + it('Simple transfer - sanity check', async () => { await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) let balance = await client.request('eth_getBalance', [ '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 'latest', ]) - st.equal(BigInt(balance.result), 1000000n, 'sent a simple ETH transfer') + assert.equal(BigInt(balance.result), 1000000n, 'sent a simple ETH transfer') await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) balance = await client.request('eth_getBalance', [ '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 'latest', ]) - st.equal(BigInt(balance.result), 2000000n, 'sent a simple ETH transfer 2x') - st.end() + assert.equal(BigInt(balance.result), 2000000n, 'sent a simple ETH transfer 2x') }) // ------------EIP 3670 tests------------------------------- - t.test(' EIP 3670 tests', async (st) => { + it(' EIP 3670 tests', async () => { const data = '0x67EF0001010001006060005260086018F3' const res = await runTx(data) - st.ok(res.contractAddress !== undefined, 'created contract') + assert.ok(res.contractAddress !== undefined, 'created contract') const code = await client.request('eth_getCode', [res.contractAddress, 'latest']) - st.equal(code.result, '0x', 'no code was deposited for invalid EOF code') - st.end() + 
assert.equal(code.result, '0x', 'no code was deposited for invalid EOF code') }) // ------------EIP 3540 tests------------------------------- - t.test('EIP 3540 tests', async (st) => { + it('EIP 3540 tests', async () => { const data = '0x6B' + 'EF0001' + '01000102000100' + '00' + 'AA' + '600052600C6014F3' const res = await runTx(data) const code = await client.request('eth_getCode', [res.contractAddress, 'latest']) - st.equal(code.result, '0XEF00010100010200010000AA'.toLowerCase(), 'deposited valid EOF1 code') - st.end() + assert.equal( + code.result, + '0XEF00010100010200010000AA'.toLowerCase(), + 'deposited valid EOF1 code' + ) }) // ------------EIP 3860 tests------------------------------- - t.test('EIP 3860 tests', async (st) => { + it('EIP 3860 tests', async () => { const data = '0x7F6000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060005260206000F' const res = await runTx(data) const code = await client.request('eth_getCode', [res.contractAddress, 'latest']) - st.equal(code.result, '0x', 'no code deposited with invalid init code') - st.end() + assert.equal(code.result, '0x', 'no code deposited with invalid init code') }) // ------------EIP 3855 tests------------------------------- - t.test('EIP 3855 tests', async (st) => { + it('EIP 3855 tests', async () => { const push1res = await runTx('0x6000') const push0res = await runTx('0x5F') - st.ok( + assert.ok( BigInt(push1res.gasUsed) > BigInt(push0res.gasUsed), 'PUSH1 transaction costs higher gas than PUSH0' ) - st.end() }) // ------------EIP 3651 tests------------------------------- - t.test('EIP 3651 tests', async (st) => { + it('EIP 3651 tests', async () => { /** * Solidity code for below contract calls * @@ -132,22 +131,18 @@ tape('EOF ephemeral hardfork tests', async (t) => { '0x5caba0a40000000000000000000000004242424242424242424242424242424242424242', contractAddress ) - st.ok( + assert.ok( BigInt(readCold.gasUsed) > BigInt(readWarmCoinbase.gasUsed), 'read 
cold storage tx should have higher cumulative gas than than read coinbase tx' ) - st.end() }) - t.test('should reset td', async (st) => { + it('should reset td', async () => { try { await teardownCallBack() - st.pass('network cleaned') + assert.ok(true, 'network cleaned') } catch (e) { - st.fail('network not cleaned properly') + assert.fail('network not cleaned properly') } - st.end() }) - - t.end() }) diff --git a/packages/client/test/sim/mainnet.spec.ts b/packages/client/test/sim/mainnet.spec.ts index 1c4198970c..90f00ee532 100644 --- a/packages/client/test/sim/mainnet.spec.ts +++ b/packages/client/test/sim/mainnet.spec.ts @@ -1,7 +1,7 @@ import { Common } from '@ethereumjs/common' import { bytesToHex, hexToBytes, privateToAddress } from '@ethereumjs/util' import { Client } from 'jayson/promise' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { filterKeywords, @@ -24,7 +24,7 @@ export async function runTx(data: string, to?: string, value?: bigint) { return runTxHelper({ client, common, sender, pkey }, data, to, value) } -tape('simple mainnet test run', async (t) => { +describe('simple mainnet test run', async () => { const { teardownCallBack, result } = await startNetwork(network, client, { filterKeywords, filterOutWords, @@ -32,29 +32,29 @@ tape('simple mainnet test run', async (t) => { }) if (result.includes('EthereumJS')) { - t.pass('connected to client') + assert.ok(true, 'connected to client') } else { - t.fail('connected to wrong client') + assert.fail('connected to wrong client') } console.log(`Waiting for network to start...`) try { await waitForELStart(client) - t.pass('ethereumjs<>lodestar started successfully') + assert.ok(true, 'ethereumjs<>lodestar started successfully') } catch (e) { - t.fail('ethereumjs<>lodestar failed to start') + assert.fail('ethereumjs<>lodestar failed to start') throw e } const blockHashes: string[] = [] // ------------Sanity checks-------------------------------- - t.test('Simple transfer - 
sanity check', async (st) => { + it('Simple transfer - sanity check', async () => { await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) let balance = await client.request('eth_getBalance', [ '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 'latest', ]) - st.equal(BigInt(balance.result), 1000000n, 'sent a simple ETH transfer') + assert.equal(BigInt(balance.result), 1000000n, 'sent a simple ETH transfer') await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) balance = await client.request('eth_getBalance', [ '0x3dA33B9A0894b908DdBb00d96399e506515A1009', @@ -64,13 +64,12 @@ tape('simple mainnet test run', async (t) => { '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 'latest', ]) - st.equal(BigInt(balance.result), 2000000n, 'sent a simple ETH transfer 2x') + assert.equal(BigInt(balance.result), 2000000n, 'sent a simple ETH transfer 2x') const latestBlock = await client.request('eth_getBlockByNumber', ['latest', false]) blockHashes.push(latestBlock.result.hash) - st.end() }) - t.test('Validate execution hashes present in beacon headers', async (st) => { + it('Validate execution hashes present in beacon headers', async () => { const eth2res = await (await fetch('http://127.0.0.1:9596/eth/v1/beacon/headers')).json() await validateBlockHashesInclusionInBeacon( 'http://127.0.0.1:9596', @@ -78,18 +77,14 @@ tape('simple mainnet test run', async (t) => { parseInt(eth2res.data[0].header.message.slot), blockHashes ) - st.end() }) - t.test('should reset td', async (st) => { + it('should reset td', async () => { try { await teardownCallBack() - st.pass('network cleaned') + assert.ok(true, 'network cleaned') } catch (e) { - st.fail('network not cleaned properly') + assert.fail('network not cleaned properly') } - st.end() }) - - t.end() }) diff --git a/packages/client/test/sim/sharding.spec.ts b/packages/client/test/sim/sharding.spec.ts index b214d2f348..06f3c7a9de 100644 --- a/packages/client/test/sim/sharding.spec.ts +++ 
b/packages/client/test/sim/sharding.spec.ts @@ -3,7 +3,7 @@ import { TransactionFactory } from '@ethereumjs/tx' import { bytesToHex, hexToBytes, privateToAddress } from '@ethereumjs/util' import { Client } from 'jayson/promise' import { randomBytes } from 'node:crypto' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { createBlobTxs, @@ -28,7 +28,7 @@ export async function runTx(data: string, to?: string, value?: bigint) { return runTxHelper({ client, common, sender, pkey }, data, to, value) } -tape('sharding/eip4844 hardfork tests', async (t) => { +describe('sharding/eip4844 hardfork tests', async () => { if (process.env.EXTRA_CL_PARAMS === undefined) { process.env.EXTRA_CL_PARAMS = '--params.CAPELLA_FORK_EPOCH 0 --params.DENEB_FORK_EPOCH 0' } @@ -40,21 +40,21 @@ tape('sharding/eip4844 hardfork tests', async (t) => { }) if (result.includes('EthereumJS')) { - t.pass('connected to client') + assert.ok(true, 'connected to client') } else { - t.fail('connected to wrong client') + assert.fail('connected to wrong client') } console.log(`Waiting for network to start...`) try { await waitForELStart(client) - t.pass('ethereumjs<>lodestar started successfully') + assert.ok(true, 'ethereumjs<>lodestar started successfully') } catch (e) { - t.fail('ethereumjs<>lodestar failed to start') + assert.fail('ethereumjs<>lodestar failed to start') throw e } - t.test('Simple blob tx', async (st) => { + it('Simple blob tx', async () => { const txResult = await runBlobTx( client, 2 ** 14, @@ -93,15 +93,14 @@ tape('sharding/eip4844 hardfork tests', async (t) => { } } - st.equal( + assert.equal( eth2kzgs[0], bytesToHex(txResult.tx.kzgCommitments![0]), 'found expected blob commitments on CL' ) - st.end() }) - t.test('data gas fee market tests', async (st) => { + it('data gas fee market tests', async () => { const txns = await createBlobTxs( 4, 4096, @@ -137,10 +136,10 @@ tape('sharding/eip4844 hardfork tests', async (t) => { [txReceipt.result.blockHash, 
false], 2.0 ) - st.ok(BigInt(block1.result.excessDataGas) > 0n, 'block1 has excess data gas > 0') + assert.ok(BigInt(block1.result.excessDataGas) > 0n, 'block1 has excess data gas > 0') }) - t.test('point precompile contract test', async (st) => { + it('point precompile contract test', async () => { const nonce = await client.request( 'eth_getTransactionCount', [sender.toString(), 'latest'], @@ -183,28 +182,25 @@ tape('sharding/eip4844 hardfork tests', async (t) => { receipt = await client.request('eth_getTransactionReceipt', [txResult.result], 2.0) await sleep(1000) } - st.ok( + assert.ok( receipt.result.contractAddress !== undefined, 'successfully deployed contract that calls precompile' ) }) /* - t.test('multipeer setup', async (st) => { + it('multipeer setup', async () => { const multiPeer = Client.http({ port: 8947 }) const res = await multiPeer.request('eth_syncing', [], 2.0) console.log(res) - st.equal(res.result, 'false', 'multipeer is up and running') + assert.equal(res.result, 'false', 'multipeer is up and running') })*/ - t.test('should reset td', async (st) => { + it('should reset td', async () => { try { await teardownCallBack() - st.pass('network cleaned') + assert.ok(true, 'network cleaned') } catch (e) { - st.fail('network not cleaned properly') + assert.fail('network not cleaned properly') } - st.end() }) - - t.end() }) diff --git a/packages/client/test/sim/snapsync.spec.ts b/packages/client/test/sim/snapsync.spec.ts index 4152350cc7..af0fe5763f 100644 --- a/packages/client/test/sim/snapsync.spec.ts +++ b/packages/client/test/sim/snapsync.spec.ts @@ -2,7 +2,7 @@ import { Common } from '@ethereumjs/common' import { bytesToHex, hexToBytes, parseGethGenesisState, privateToAddress } from '@ethereumjs/util' import debug from 'debug' import { Client } from 'jayson/promise' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Config } from '../../src/config' import { getLogger } from '../../src/logging' @@ -35,7 +35,7 @@ 
export async function runTx(data: string, to?: string, value?: bigint) { return runTxHelper({ client, common, sender, pkey }, data, to, value) } -tape('simple mainnet test run', async (t) => { +describe('simple mainnet test run', async () => { // Better add it as a option in startnetwork process.env.NETWORKID = `${common.networkId()}` const { teardownCallBack, result } = await startNetwork(network, client, { @@ -46,65 +46,60 @@ tape('simple mainnet test run', async (t) => { }) if (result.includes('Geth')) { - t.pass('connected to Geth') + assert.ok(true, 'connected to Geth') } else { - t.fail('connected to wrong client') + assert.fail('connected to wrong client') } const nodeInfo = (await client.request('admin_nodeInfo', [])).result - t.ok(nodeInfo.enode !== undefined, 'fetched enode for peering') + assert.ok(nodeInfo.enode !== undefined, 'fetched enode for peering') console.log(`Waiting for network to start...`) try { await waitForELStart(client) - t.pass('geth<>lodestar started successfully') + assert.ok(true, 'geth<>lodestar started successfully') } catch (e) { - t.fail('geth<>lodestar failed to start') + assert.fail('geth<>lodestar failed to start') throw e } // ------------Sanity checks-------------------------------- - t.test( - 'add some EOA transfers', - { skip: process.env.ADD_EOA_STATE === undefined }, - async (st) => { - const startBalance = await client.request('eth_getBalance', [ - '0x3dA33B9A0894b908DdBb00d96399e506515A1009', - 'latest', - ]) - st.ok( - startBalance.result !== undefined, - `fetched 0x3dA33B9A0894b908DdBb00d96399e506515A1009 balance=${startBalance.result}` - ) - await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) - let balance = await client.request('eth_getBalance', [ - '0x3dA33B9A0894b908DdBb00d96399e506515A1009', - 'latest', - ]) - st.equal( - BigInt(balance.result), - BigInt(startBalance.result) + 1000000n, - 'sent a simple ETH transfer' - ) - await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 
1000000n) - balance = await client.request('eth_getBalance', [ - '0x3dA33B9A0894b908DdBb00d96399e506515A1009', - 'latest', - ]) - balance = await client.request('eth_getBalance', [ - '0x3dA33B9A0894b908DdBb00d96399e506515A1009', - 'latest', - ]) - st.equal( - BigInt(balance.result), - BigInt(startBalance.result) + 2000000n, - 'sent a simple ETH transfer 2x' - ) - st.end() - } - ) + it('add some EOA transfers', { skip: process.env.ADD_EOA_STATE === undefined }, async () => { + const startBalance = await client.request('eth_getBalance', [ + '0x3dA33B9A0894b908DdBb00d96399e506515A1009', + 'latest', + ]) + assert.ok( + startBalance.result !== undefined, + `fetched 0x3dA33B9A0894b908DdBb00d96399e506515A1009 balance=${startBalance.result}` + ) + await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) + let balance = await client.request('eth_getBalance', [ + '0x3dA33B9A0894b908DdBb00d96399e506515A1009', + 'latest', + ]) + assert.equal( + BigInt(balance.result), + BigInt(startBalance.result) + 1000000n, + 'sent a simple ETH transfer' + ) + await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) + balance = await client.request('eth_getBalance', [ + '0x3dA33B9A0894b908DdBb00d96399e506515A1009', + 'latest', + ]) + balance = await client.request('eth_getBalance', [ + '0x3dA33B9A0894b908DdBb00d96399e506515A1009', + 'latest', + ]) + assert.equal( + BigInt(balance.result), + BigInt(startBalance.result) + 2000000n, + 'sent a simple ETH transfer 2x' + ) + }) - t.test('setup snap sync', { skip: process.env.SNAP_SYNC === undefined }, async (st) => { + it('setup snap sync', { skip: process.env.SNAP_SYNC === undefined }, async () => { // start client inline here for snap sync, no need for beacon const { ejsInlineClient, peerConnectedPromise, snapSyncCompletedPromise } = // eslint-disable-next-line @typescript-eslint/no-use-before-define @@ -117,23 +112,22 @@ tape('simple mainnet test run', async (t) => { } ejsClient = ejsInlineClient snapCompleted = 
snapSyncCompletedPromise - st.ok(ejsClient !== null, 'ethereumjs client started') + assert.ok(ejsClient !== null, 'ethereumjs client started') const enode = (ejsClient!.server('rlpx') as RlpxServer)!.getRlpxInfo().enode const res = await client.request('admin_addPeer', [enode]) - st.equal(res.result, true, 'successfully requested Geth add EthereumJS as peer') + assert.equal(res.result, true, 'successfully requested Geth add EthereumJS as peer') const peerConnectTimeout = new Promise((_resolve, reject) => setTimeout(reject, 10000)) try { await Promise.race([peerConnectedPromise, peerConnectTimeout]) - st.pass('connected to geth peer') + assert.ok(true, 'connected to geth peer') } catch (e) { - st.fail('could not connect to geth peer in 10 seconds') + assert.fail('could not connect to geth peer in 10 seconds') } - st.end() }) - t.test('should snap sync and finish', async (st) => { + it('should snap sync and finish', async () => { try { if (ejsClient !== null && snapCompleted !== undefined) { // call sync if not has been called yet @@ -142,24 +136,21 @@ tape('simple mainnet test run', async (t) => { const snapSyncTimeout = new Promise((_resolve, reject) => setTimeout(reject, 40000)) try { await Promise.race([snapCompleted, snapSyncTimeout]) - st.pass('completed snap sync') + assert.ok(true, 'completed snap sync') } catch (e) { - st.fail('could not complete snap sync in 40 seconds') + assert.fail('could not complete snap sync in 40 seconds') } await ejsClient.stop() } else { - st.fail('ethereumjs client not setup properly for snap sync') + assert.fail('ethereumjs client not setup properly for snap sync') } await teardownCallBack() - st.pass('network cleaned') + assert.ok(true, 'network cleaned') } catch (e) { - st.fail('network not cleaned properly') + assert.fail('network not cleaned properly') } - st.end() }) - - t.end() }) async function createSnapClient(common: any, customGenesisState: any, bootnodes: any) { diff --git a/packages/client/test/sync/beaconsync.spec.ts 
b/packages/client/test/sync/beaconsync.spec.ts index a2015ee0b3..eab9f2e66a 100644 --- a/packages/client/test/sync/beaconsync.spec.ts +++ b/packages/client/test/sync/beaconsync.spec.ts @@ -1,13 +1,14 @@ import { Block } from '@ethereumjs/block' import { MemoryLevel } from 'memory-level' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it, vi } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' import { Skeleton } from '../../src/sync' +import { ReverseBlockFetcher } from '../../src/sync/fetcher/reverseblockfetcher' -tape('[BeaconSynchronizer]', async (t) => { +describe('[BeaconSynchronizer]', async () => { const execution: any = { run: () => {} } class PeerPool { open() {} @@ -23,35 +24,24 @@ tape('[BeaconSynchronizer]', async (t) => { PeerPool.prototype.open = td.func() PeerPool.prototype.close = td.func() PeerPool.prototype.idle = td.func() - class ReverseBlockFetcher { - first: bigint - count: bigint - constructor(opts: any) { - this.first = opts.first - this.count = opts.count - } - fetch() {} - clear() {} - destroy() {} - } + ReverseBlockFetcher.prototype.fetch = td.func() ReverseBlockFetcher.prototype.clear = td.func() ReverseBlockFetcher.prototype.destroy = td.func() - td.replace('../../src/sync/fetcher', { ReverseBlockFetcher }) + vi.doMock('../../src/sync/fetcher/reverseblockfetcher', () => td.constructor(ReverseBlockFetcher)) const { BeaconSynchronizer } = await import('../../src/sync/beaconsync') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - t.equal(sync.type, 'beacon', 'beacon type') 
- t.end() + assert.equal(sync.type, 'beacon', 'beacon type') }) - t.test('should open', async (t) => { + it('should open', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -61,12 +51,11 @@ tape('[BeaconSynchronizer]', async (t) => { ;(sync as any).pool.peers = [] td.when((sync as any).pool.open()).thenResolve(null) await sync.open() - t.pass('opened') + assert.ok(true, 'opened') await sync.close() - t.end() }) - t.test('should get height', async (t) => { + it('should get height', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -76,13 +65,12 @@ tape('[BeaconSynchronizer]', async (t) => { const headers = [{ number: BigInt(5) }] td.when(peer.eth.getBlockHeaders({ block: 'hash', max: 1 })).thenResolve([BigInt(1), headers]) const latest = await sync.latest(peer as any) - t.ok(latest!.number === BigInt(5), 'got height') + assert.ok(latest!.number === BigInt(5), 'got height') await sync.stop() await sync.close() - t.end() }) - t.test('should find best', async (t) => { + it('should find best', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -103,102 +91,112 @@ tape('[BeaconSynchronizer]', async (t) => { ]) ;(sync as any).pool = { peers } ;(sync as any).forceSync = true - t.equal(await sync.best(), peers[1], 'found best') + assert.equal(await sync.best(), peers[1], 'found best') await sync.stop() await sync.close() - t.end() }) - t.test('should sync to next subchain head or chain height', async (st) => { - st.plan(3) - const config = new Config({ - transports: [], - safeReorgDistance: 0, - skeletonSubchainMergeMinimum: 0, - accountCache: 10000, - storageCache: 1000, - }) - 
const pool = new PeerPool() as any - const chain = await Chain.create({ config }) - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - skeleton['getSyncStatus'] = td.func() - await skeleton.open() - const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - sync.best = td.func() - sync.latest = td.func() - td.when(sync.best()).thenResolve('peer') - td.when(sync.latest('peer' as any)).thenResolve({ - number: BigInt(2), - hash: () => new Uint8Array(0), - }) - td.when(ReverseBlockFetcher.prototype.fetch(), { delay: 100, times: 3 }).thenResolve(undefined) - ;(skeleton as any).status.progress.subchains = [ - { head: BigInt(10), tail: BigInt(6) }, - { head: BigInt(4), tail: BigInt(2) }, - ] - ;(sync as any).chain = { - blocks: { height: BigInt(0) }, - } - sync.config.logger.addListener('data', (data: any) => { - if ((data.message as string).includes('first=5 count=5')) - st.pass('should sync block 5 and target chain start') - }) - await sync.sync() - sync.config.logger.removeAllListeners() - sync.config.logger.addListener('data', (data: any) => { - if ((data.message as string).includes('first=1 count=1')) - st.pass('should sync block 1 and target chain start') - }) - ;(skeleton as any).status.progress.subchains = [{ head: BigInt(10), tail: BigInt(2) }] - await sync.sync() - sync.config.logger.removeAllListeners() - ;(skeleton as any).status.progress.subchains = [{ head: BigInt(10), tail: BigInt(6) }] - ;(sync as any).chain = { blocks: { height: BigInt(4) } } - sync.config.logger.addListener('data', (data: any) => { - if ((data.message as string).includes('first=5 count=1')) - st.pass('should sync block 5 with count 1') - }) - await sync.sync() - sync.config.logger.removeAllListeners() - }) + it( + 'should sync to next subchain head or chain height', + async () => { + const config = new Config({ + transports: [], + safeReorgDistance: 0, + skeletonSubchainMergeMinimum: 0, + accountCache: 10000, + storageCache: 1000, + }) 
+ const pool = new PeerPool() as any + const chain = await Chain.create({ config }) + const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) + skeleton['getSyncStatus'] = td.func() + await skeleton.open() - t.test('should not sync pre-genesis', async (st) => { - st.plan(1) - const config = new Config({ - transports: [], - safeReorgDistance: 0, - skeletonSubchainMergeMinimum: 1000, - accountCache: 10000, - storageCache: 1000, - }) - const pool = new PeerPool() as any - const chain = await Chain.create({ config }) - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - skeleton['getSyncStatus'] = td.func() - await skeleton.open() - const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - sync.best = td.func() - sync.latest = td.func() - td.when(sync.best()).thenResolve('peer') - td.when(sync.latest('peer' as any)).thenResolve({ - number: BigInt(2), - hash: () => new Uint8Array(0), - }) - td.when(ReverseBlockFetcher.prototype.fetch(), { delay: 100, times: 1 }).thenResolve(undefined) - ;(skeleton as any).status.progress.subchains = [{ head: BigInt(10), tail: BigInt(6) }] - ;(sync as any).chain = { - // Make height > tail so that skeletonSubchainMergeMinimum is triggered - blocks: { height: BigInt(100) }, - } - sync.config.logger.addListener('data', (data: any) => { - if ((data.message as string).includes('first=5 count=5')) - st.pass('should sync block 5 and target chain start') - }) - await sync.sync() - sync.config.logger.removeAllListeners() - }) + const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) + sync.best = td.func() + sync.latest = td.func() + td.when(sync.best()).thenResolve('peer') + td.when(sync.latest('peer' as any)).thenResolve({ + number: BigInt(2), + hash: () => new Uint8Array(0), + }) + td.when(ReverseBlockFetcher.prototype.fetch(), { delay: 100, times: 3 }).thenResolve( + undefined + ) + ;(skeleton as any).status.progress.subchains = [ + { head: 
BigInt(10), tail: BigInt(6) }, + { head: BigInt(4), tail: BigInt(2) }, + ] + ;(sync as any).chain = { + blocks: { height: BigInt(0) }, + } + sync.config.logger.addListener('data', (data: any) => { + if ((data.message as string).includes('first=5 count=5')) + assert.ok(true, 'should sync block 5 and target chain start') + }) + await sync.sync() + sync.config.logger.removeAllListeners() + sync.config.logger.addListener('data', (data: any) => { + if ((data.message as string).includes('first=1 count=1')) + assert.ok(true, 'should sync block 1 and target chain start') + }) + ;(skeleton as any).status.progress.subchains = [{ head: BigInt(10), tail: BigInt(2) }] + await sync.sync() + sync.config.logger.removeAllListeners() + ;(skeleton as any).status.progress.subchains = [{ head: BigInt(10), tail: BigInt(6) }] + ;(sync as any).chain = { blocks: { height: BigInt(4) } } + sync.config.logger.addListener('data', (data: any) => { + if ((data.message as string).includes('first=5 count=1')) + assert.ok(true, 'should sync block 5 with count 1') + }) + await sync.sync() + sync.config.logger.removeAllListeners() + }, + { timeout: 120000 } + ) - t.test('should extend and set with a valid head', async (t) => { + it( + 'should not sync pre-genesis', + async () => { + const config = new Config({ + transports: [], + safeReorgDistance: 0, + skeletonSubchainMergeMinimum: 1000, + accountCache: 10000, + storageCache: 1000, + }) + const pool = new PeerPool() as any + const chain = await Chain.create({ config }) + const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) + skeleton['getSyncStatus'] = td.func() + await skeleton.open() + const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) + sync.best = td.func() + sync.latest = td.func() + td.when(sync.best()).thenResolve('peer') + td.when(sync.latest('peer' as any)).thenResolve({ + number: BigInt(2), + hash: () => new Uint8Array(0), + }) + td.when(ReverseBlockFetcher.prototype.fetch(), { delay: 
100, times: 1 }).thenResolve( + undefined + ) + ;(skeleton as any).status.progress.subchains = [{ head: BigInt(10), tail: BigInt(6) }] + ;(sync as any).chain = { + // Make height > tail so that skeletonSubchainMergeMinimum is triggered + blocks: { height: BigInt(100) }, + } + sync.config.logger.addListener('data', (data: any) => { + if ((data.message as string).includes('first=5 count=5')) + assert.ok(true, 'should sync block 5 and target chain start') + }) + await sync.sync() + sync.config.logger.removeAllListeners() + }, + { timeout: 120000 } + ) + + it('should extend and set with a valid head', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -214,20 +212,22 @@ tape('[BeaconSynchronizer]', async (t) => { ] await sync.open() const block = Block.fromBlockData({ header: { number: BigInt(16), parentHash: head.hash() } }) - t.ok(await sync.extendChain(block), 'should extend chain successfully') - t.ok(await sync.setHead(block), 'should set head successfully') - t.equal(skeleton.bounds().head, BigInt(16), 'head should be updated') + assert.ok(await sync.extendChain(block), 'should extend chain successfully') + assert.ok(await sync.setHead(block), 'should set head successfully') + assert.equal(skeleton.bounds().head, BigInt(16), 'head should be updated') const gapBlock = Block.fromBlockData({ header: { number: BigInt(18) } }) - t.notOk(await sync.extendChain(gapBlock), 'should not extend chain with gapped block') - t.ok(await sync.setHead(gapBlock), 'should be able to set and update head with gapped block') - t.equal(skeleton.bounds().head, BigInt(18), 'head should update with gapped block') + assert.notOk(await sync.extendChain(gapBlock), 'should not extend chain with gapped block') + assert.ok( + await sync.setHead(gapBlock), + 'should be able to set and update head with gapped block' + ) + assert.equal(skeleton.bounds().head, 
BigInt(18), 'head should update with gapped block') await sync.stop() await sync.close() - t.end() }) - t.test('syncWithPeer should return early if skeleton is already linked', async (t) => { + it('syncWithPeer should return early if skeleton is already linked', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -235,18 +235,16 @@ tape('[BeaconSynchronizer]', async (t) => { skeleton.isLinked = () => true // stub const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) await sync.open() - t.equal( + assert.equal( await sync.syncWithPeer({} as any), false, `syncWithPeer should return false as nothing to sync` ) await sync.stop() await sync.close() - t.end() }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/fetcher/accountfetcher.spec.ts b/packages/client/test/sync/fetcher/accountfetcher.spec.ts index db300808b7..57ddd0aef2 100644 --- a/packages/client/test/sync/fetcher/accountfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/accountfetcher.spec.ts @@ -1,7 +1,7 @@ import { RLP } from '@ethereumjs/rlp' import { bytesToBigInt, hexToBytes } from '@ethereumjs/util' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain' import { Config } from '../../../src/config' @@ -12,7 +12,7 @@ import { wait } from '../../integration/util' export const _accountRangeRLP = 
'0xf90b7c01f88aeda0000001907a67cf7ece54c42262997b2f19041a4d99466b94b8c12f225827e239cb80872386f26fc100008080eda00000107c642e29a6b613205c923ac3a4cf0cf1704ae9a8bef2784caba060f4b7cb07870e22e1219054118080eda000001d26422787b6d40c0c0c2df85757c5ad4a3e367831e932fa24f34da43d57cb80872386f26fc100008080f90aecb90214f90211a0b3f22b069c398ded55d4ce421b06f6b4d5e13cb53ad1c6220276b2b3a078937ba08a54e492e7b9ef911b4a299487a12390ccd81a087398af7106e00b81a791868da0a323a93f5791d4c39e1496e4856f9233e5e86070c722efde613219aca834bde3a0d8c11a8fc2eba0b47de9d5b207b702a8bd62609e9c2504aaa444fd2e98e31deaa0dbfc625e370fa89cb7b123550ef6fd637687b9e9a7c8556bd41bcd4226226095a094fe5f6ac37c805917beefa220d7c6b3bd50848322f6342e940cc047c9b6a8ffa074af7e57b9c59e06a2e478610d56ab39004cda3109cfd953dc8b1d168c453cbca0d58f31d0ecce773d610aa5d12f7cc2f4ca992db4ce2e154c13a12cb4bb567816a0b26a7d9776165bb52e793df6a77d4032164d788bf9954c9cac289ea0786da2fda043804bd146f583b183dc267b36bbe55f63daa36fd6cbdafce48ce451a444b4eca0fc724e8bb65724450eb3966d8672330c8e49a94c6ceaed06174a2322aafee105a02ccb0445b0a4028f167e425b57cb9462cc6caceda0c3cfb5363f08614314a77ca0c64db3edb50609b6de331f00ba1f455113d1388e9eb5f50f5420983012d62b7da0168c680c03ef3fbcc36a6c1ddd9bf7d46b5fd5ee34dd7048320223c8bbe412f9a05747d2eb930bffce317c253e3889a7db57c87dcc55f1f1f77b3d02fc82bc6bcfa0997073e1664f9cbbcfd968277856596c325a6b83887f4ad007c3b93e1133c65280b90214f90211a0b3e6ec5fa09062b280599994d38261cae87ab198ed1b3a7d7003a277ffc735dfa01bac91007228f4fa15ac9c2a4822b7d4103eafae61dd3db30eb830e31de9cddfa0809973bebc62f48fb834336800b1ce8e1b2128ee5824645464b6c09ddd381578a0f8d54e19e888fc01cd5069bfcddb7ee78a4afdec24aa03822d9fd5356a3c109fa08a61ea95c616906799398778b28f0e8a19f6569f885e4b4f1192f3e9f690cefea09aa53cd259b1df9650222dc285236399da685b7350312a3ac0a07a86bef64d5ea01596637937233489a70e114c23818e3512b3c2abf621d142c14a9b9a3afb09d1a0e8a8bcda78ae77bee956389dff38a10c8c1565bc1a85064da6cd8ba606b9aa35a04ae4b4bfbfb97f5b4e178f8c30a6d93ffd6614c8b4d0b44df31b653a3a1e4f0fa0a4e3413e6ee6c5886ed346827
ee0cce05a8e4f799b005aacf002a17e6d93e5aaa09a3e6d344bbd2496bf8fa84abc96a3d5f363ba03103edff2164244bb020c52a2a0998f39835105197f860930b46adad4527f5a9ef31c4744476718b910ffc5e586a01cec4592958b5aefe25bea6a49a11089e798d96aebc2be7fce0f1772146d18aea0d7c178ed5bcf822d22f9ed3ca8c95e5144ee0a9fbae901b21da002e2c3c0415ea0a9d5c5c67326f4154449575827ab68ea47c7c8931490160a7a299f829a670476a074814ffe69da7e253de29fc7d5eb57291a67bd6f16cb52175106b7cbd3b19c8f80b90214f90211a0947eec1b645849d129fb8c65cd06bd52526fb2399d1660ee5108fc4698e809aaa02735f6cbb0e10514b1515826ae1c539850543dbe162badaf2efa51b1a353ca1ca0fde2642bcc8db8d6d6e42731eeae2045fc30b84c6efdc420ce8cee5d537b648fa071e7887ca31ae375838ceeed57165f5592a9e6cae9beb070e92a4f5d5aec5014a0f81f4b4d5e2c52373b8884b398838941df0b16177aa4ea8494b183176cf7d526a0dc6ecec073532c8f9581ece75cb4eea83a40ba0210cc10ef0fd8b27a102a028fa0426f18f1de1bc9b665e9efb45d6547e88e35a267d7ec9197ae97052d1be59ab9a0d6aad68bece934d578e18eb3acd147490bc6cc01e646f1d8618a747526eae4f5a04ffee6f8660794981b15fda1ceafef98db853bfc31c029db7cb515bb34bb5572a0da2497fed45626b94c1eb910c9eedc9c26a4ff5b56b709b96d5a567991ebe2aca021b3bfcd8aa97eb8d9a3ce258389603564f01d6f485899a9f6e0a00d85dc00dfa0339e45f0407ad527a899a2e06e17330c2cfe25b81689dcffd20c166ef256fbc6a0dafd25416aaf44a8bfa1a6bf2b0cc563f9be84b9b3b8bf307983252d7cd63c51a0191504034adb55fe0926c7c4066654739af3e1c9c4173f4d90fa2e1df62a99cca0504e2144c1a889e48cd5a6baa17e39b6a176dbf41147dd171f2673c5c9d849dba04850f33ad929cb1a07136f162e33a5df0f65c48f359637774e7c8ebabe90eb7080b90214f90211a05d16e93a6e58a13a7c7dde40d0c543b9d63d029ec0da5efb4be34cd4ce672181a089cbb0e940fb7bb395091e3b665755be6b51292fba7a7bc39904568c63a907e1a050314b93f73fed553cd9dee63dc1fe9b789f9b9e111a659ff4e4c91c8167a63ca04444bd2a1bb78a83b66a36a09076b2b49eade4e2e8c8ef91538117525893841aa0abde6220817f3608bdfec46ebed292c464ee1d2c58d0b43286b8617bb4cb49d9a07257eff6aebb380db4c75752a84c6b2d0bb86bb190cef2a58829497997262b6aa0a0d4ab9d93be97287f29637a9b16fb8a6c8cd3bc29786b64343113b95a4153ffa0f0d479377ce4c0f31
185c45319f915532cea13e97d5abfc939b75b642b5b47bba0eb96a911347f5321e03f1602a041ce82ec29bb4b322faa9f999cf02bb0c7a932a047b6c76ffeb29b4e3c3c09749289213395c8b0126dbd8acee45c6d32d2a0ab5fa0ca462e8ff237f9e56698ca416fac835ed37bc90683d363effe7ec9dacb4963fba0d385f828becce3665e070b645df25dec507a7c6c3813591e3436147be0becc75a0537a7451522228feca0ceb55374615e8396229e1c7a6b0ae16fb49cd8e6ed7a9a0b96561ab484f67b604d2dc46ac170750b321334aabcfb6b212a906e1cb5b3532a09f64f7c76e201d48b4bc1fb02f7e052a5a1bf05b2c59f3c969c8d2d6b373b3dca0398a988af30676952fcf1a968ac530b30dbe32922efe8c27acb9025adcaf1a5180b90134f90131a0b2151043be015f98b1b249180bfac505781022ede708f533f373b2d612837df7a0031e6ffe32d313f0cd57b4bebbf6fcacf83c366157846040108d198129d99a5aa0bfca4f79ac9eb24bcbdbd94fc49c0ca30a6399a2071e4ab3024e1aae0159a31180808080a0f1a2c911436f5bf1aa936e140b17399f7c092ad64a8ab839057a67fc6923a318a0e648ced926c977b0dcc17452361ac43e53f839b8e485a288e93fb667573ae088a0808107d197eb28741f8cec92b6fa76957fa6928b00f4b7301d464809519258098080a02c7ac441b072bbe33030110dccfdda0de6705c4bdb2c94594e10c2fb8687c41080a0162e8104a86bd043ca2fac0c5d56181127c7b24f6c10fefb90c27064b4edeff8a0376bcbdd3b7503a144b9016159b7e2cd074c9566b843cb834123057c61adbd2e80b870f86e9e31907a67cf7ece54c42262997b2f19041a4d99466b94b8c12f225827e239b84df84b80872386f26fc10000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470b873f871a0a75a6fa397f39292a3bb4fdb84463908c473bad9a0206bd00964adabd7a4b589808080808080808080808080a0ea5b9774dfc3fd50b359b86fa49a57fce0186593cf89d865e279413b63947bed80a0a0747bb1023533b4f9cdaa7c845609975d413348fc5f185a120037dccdf3584c80b870f86e9e2026422787b6d40c0c0c2df85757c5ad4a3e367831e932fa24f34da43d57b84df84b80872386f26fc10000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' -tape('[AccountFetcher]', async (t) => { +describe('[AccountFetcher]', async () => { class PeerPool { idle() 
{} ban() {} @@ -24,7 +24,7 @@ tape('[AccountFetcher]', async (t) => { '../../../src/sync/fetcher/accountfetcher' ) - t.test('should start/stop', async (t) => { + it('should start/stop', async () => { const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const fetcher = new AccountFetcher({ @@ -35,18 +35,17 @@ tape('[AccountFetcher]', async (t) => { count: BigInt(10), }) fetcher.next = () => false - t.notOk((fetcher as any).running, 'not started') + assert.notOk((fetcher as any).running, 'not started') void fetcher.fetch() - t.equals((fetcher as any).in.length, 1, 'added 1 tasks') + assert.equal((fetcher as any).in.length, 1, 'added 1 tasks') await wait(100) - t.ok((fetcher as any).running, 'started') + assert.ok((fetcher as any).running, 'started') fetcher.destroy() await wait(100) - t.notOk((fetcher as any).running, 'stopped') - t.end() + assert.notOk((fetcher as any).running, 'stopped') }) - t.test('should process', (t) => { + it('should process', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const fetcher = new AccountFetcher({ @@ -77,12 +76,11 @@ tape('[AccountFetcher]', async (t) => { }, ] accountDataResponse.completed = true - t.deepEquals(fetcher.process({} as any, accountDataResponse), fullResult, 'got results') - t.notOk(fetcher.process({} as any, { accountDataResponse: [] } as any), 'bad results') - t.end() + assert.deepEqual(fetcher.process({} as any, accountDataResponse), fullResult, 'got results') + assert.notOk(fetcher.process({} as any, { accountDataResponse: [] } as any), 'bad results') }) - t.test('should adopt correctly', (t) => { + it('should adopt correctly', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const fetcher = new AccountFetcher({ @@ -108,9 +106,9 @@ tape('[AccountFetcher]', async (t) => { fetcher.enqueueTask(task) const job = 
(fetcher as any).in.peek() let results = fetcher.process(job as any, accountDataResponse) - t.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') - t.equal(job?.partialResult?.length, 2, 'Should have two partial results') - t.equal(results, undefined, 'Process should not return full results yet') + assert.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') + assert.equal(job?.partialResult?.length, 2, 'Should have two partial results') + assert.equal(results, undefined, 'Process should not return full results yet') const remainingAccountData: any = [ { @@ -120,12 +118,10 @@ tape('[AccountFetcher]', async (t) => { ] remainingAccountData.completed = true results = fetcher.process(job as any, remainingAccountData) - t.equal(results?.length, 3, 'Should return full results') - - t.end() + assert.equal(results?.length, 3, 'Should return full results') }) - t.test('should request correctly', async (t) => { + it('should request correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const fetcher = new AccountFetcher({ @@ -164,10 +160,9 @@ tape('[AccountFetcher]', async (t) => { bytes: BigInt(50000), }) ) - t.end() }) - t.test('should verify proof correctly', async (t) => { + it('should verify proof correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new SnapProtocol({ config, chain }) @@ -183,7 +178,7 @@ tape('[AccountFetcher]', async (t) => { hexToBytes('0x000010c6f7a0b5ed8d36b4c7f34938583621fafc8b0079a2834d26fa3fcc9ea9') ), }) - t.ok(fetcher.storageFetcher !== undefined, 'storageFetcher should be created') + assert.ok(fetcher.storageFetcher !== undefined, 'storageFetcher should be created') const task = { count: 3, first: BigInt(1) } const resData = RLP.decode(hexToBytes(_accountRangeRLP)) @@ -204,8 +199,8 @@ 
tape('[AccountFetcher]', async (t) => { } const job = { peer, task } const results = await fetcher.request(job as any) - t.ok(results !== undefined, 'Proof verification is completed without errors') - t.ok( + assert.ok(results !== undefined, 'Proof verification is completed without errors') + assert.ok( fetcher.process(job as any, results!) !== undefined, 'Response should be processed properly' ) @@ -215,9 +210,9 @@ tape('[AccountFetcher]', async (t) => { fetcher.byteCodeFetcher.enqueueByByteCodeRequestList = td.func() try { await fetcher.store(results!) - t.pass('fetcher stored results successfully') + assert.ok(true, 'fetcher stored results successfully') } catch (e) { - t.fail(`fetcher failed to store results, Error: ${(e as Error).message}`) + assert.fail(`fetcher failed to store results, Error: ${(e as Error).message}`) } const fetcherDoneFlags = fetcher.fetcherDoneFlags @@ -235,18 +230,16 @@ tape('[AccountFetcher]', async (t) => { const snapSyncTimeout = new Promise((_resolve, reject) => setTimeout(reject, 10000)) try { await Promise.race([snapCompleted, snapSyncTimeout]) - t.pass('completed snap sync') + assert.ok(true, 'completed snap sync') } catch (e) { - t.fail('could not complete snap sync in 40 seconds') + assert.fail('could not complete snap sync in 40 seconds') } // send end of range input to store await fetcher.store([Object.create(null)] as any) - - t.end() }) - t.test('should find a fetchable peer', async (t) => { + it('should find a fetchable peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const fetcher = new AccountFetcher({ @@ -257,12 +250,10 @@ tape('[AccountFetcher]', async (t) => { count: BigInt(10), }) td.when((fetcher as any).pool.idle(td.matchers.anything())).thenReturn('peer0') - t.equals(fetcher.peer(), 'peer0', 'found peer') - t.end() + assert.equal(fetcher.peer(), 'peer0' as any, 'found peer') }) - t.test('should reset td', (t) => { + 
it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/fetcher/blockfetcher.spec.ts b/packages/client/test/sync/fetcher/blockfetcher.spec.ts index 13f91ccc8a..79dfd82591 100644 --- a/packages/client/test/sync/fetcher/blockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/blockfetcher.spec.ts @@ -1,15 +1,15 @@ import { BlockHeader } from '@ethereumjs/block' import { Hardfork } from '@ethereumjs/common' import { KECCAK256_RLP } from '@ethereumjs/util' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain/chain' import { Config } from '../../../src/config' import { Event } from '../../../src/types' import { wait } from '../../integration/util' -tape('[BlockFetcher]', async (t) => { +describe('[BlockFetcher]', async () => { class PeerPool { idle() {} ban() {} @@ -19,7 +19,7 @@ tape('[BlockFetcher]', async (t) => { const { BlockFetcher } = await import('../../../src/sync/fetcher/blockfetcher') - t.test('should start/stop', async (t) => { + it('should start/stop', async () => { const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -32,18 +32,17 @@ tape('[BlockFetcher]', async (t) => { timeout: 5, }) fetcher.next = () => false - t.notOk((fetcher as any).running, 'not started') + assert.notOk((fetcher as any).running, 'not started') void fetcher.fetch() - t.equals((fetcher as any).in.length, 2, 'added 2 tasks') + assert.equal((fetcher as any).in.length, 2, 'added 2 tasks') await wait(100) - t.ok((fetcher as any).running, 'started') + assert.ok((fetcher as any).running, 'started') fetcher.destroy() await wait(100) - t.notOk((fetcher as any).running, 'stopped') - t.end() + assert.notOk((fetcher as any).running, 'stopped') }) - t.test('enqueueByNumberList()', async (t) => { + it('enqueueByNumberList()', async () => { const config = new 
Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -56,7 +55,7 @@ tape('[BlockFetcher]', async (t) => { timeout: 5, }) void fetcher.fetch() - t.equals((fetcher as any).in.length, 2, 'added 2 tasks') + assert.equal((fetcher as any).in.length, 2, 'added 2 tasks') await wait(100) let blockNumberList = [BigInt(11), BigInt(12)] @@ -64,14 +63,14 @@ tape('[BlockFetcher]', async (t) => { let max = BigInt(12) fetcher.enqueueByNumberList(blockNumberList, min, max) - t.equals((fetcher as any).in.length, 3, '1 new task for two subsequent block numbers') + assert.equal((fetcher as any).in.length, 3, '1 new task for two subsequent block numbers') blockNumberList = [BigInt(13), BigInt(15)] min = BigInt(13) max = BigInt(15) fetcher.enqueueByNumberList(blockNumberList, min, max) - t.equals((fetcher as any).in.length, 3, 'no new task added only the height changed') - t.equals( + assert.equal((fetcher as any).in.length, 3, 'no new task added only the height changed') + assert.equal( fetcher.first + fetcher.count - BigInt(1) === BigInt(15), true, 'height should now be 15' @@ -83,17 +82,16 @@ tape('[BlockFetcher]', async (t) => { min = BigInt(50) max = BigInt(51) fetcher.enqueueByNumberList(blockNumberList, min, max) - t.equals( + assert.equal( (fetcher as any).in.length, 11, '10 new tasks to catch up to head (1-49, 5 per request), 1 new task for subsequent block numbers (50-51)' ) fetcher.destroy() - t.end() }) - t.test('should process', async (t) => { + it('should process', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -105,12 +103,14 @@ tape('[BlockFetcher]', async (t) => { count: BigInt(0), }) const blocks: any = [{ header: { number: 1 } }, { header: { number: 2 } }] - t.deepEquals(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') - 
t.notOk(fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), 'bad results') - t.end() + assert.deepEqual(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') + assert.notOk( + fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), + 'bad results' + ) }) - t.test('should adopt correctly', async (t) => { + it('should adopt correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -127,18 +127,16 @@ tape('[BlockFetcher]', async (t) => { fetcher.enqueueTask(task) const job = (fetcher as any).in.peek() let results = fetcher.process(job as any, blocks) - t.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') - t.equal(job?.partialResult?.length, 2, 'Should have two partial results') - t.equal(results, undefined, 'Process should not return full results yet') + assert.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') + assert.equal(job?.partialResult?.length, 2, 'Should have two partial results') + assert.equal(results, undefined, 'Process should not return full results yet') const remainingBlocks: any = [{ header: { number: 3 } }] results = fetcher.process(job as any, remainingBlocks) - t.equal(results?.length, 3, 'Should return full results') - - t.end() + assert.equal(results?.length, 3, 'Should return full results') }) - t.test('should find a fetchable peer', async (t) => { + it('should find a fetchable peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -150,11 +148,10 @@ tape('[BlockFetcher]', async (t) => { count: BigInt(0), }) td.when((fetcher as any).pool.idle(td.matchers.anything())).thenReturn('peer0') - t.equals(fetcher.peer(), 'peer0', 'found peer') - t.end() + 
assert.equal(fetcher.peer(), 'peer0', 'found peer') }) - t.test('should request correctly', async (t) => { + it('should request correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -182,10 +179,9 @@ tape('[BlockFetcher]', async (t) => { reverse: false, }) ) - t.end() }) - t.test('should parse bodies correctly', async (t) => { + it('should parse bodies correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) config.chainCommon.getHardforkBy = td.func() td.when( @@ -232,14 +228,12 @@ tape('[BlockFetcher]', async (t) => { td.when(peer.eth.getBlockBodies(td.matchers.anything())).thenResolve([0, [[[], [], []]]]) const job = { peer, task } const resp = await fetcher.request(job as any) - t.equal(resp.length, 1, 'shanghai block should have been returned') - t.equal(resp[0].withdrawals?.length, 0, 'should have withdrawals array') - t.end() + assert.equal(resp.length, 1, 'shanghai block should have been returned') + assert.equal(resp[0].withdrawals?.length, 0, 'should have withdrawals array') }) - t.test('store()', async (st) => { + it('store()', async () => { td.reset() - st.plan(4) const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any @@ -258,26 +252,25 @@ tape('[BlockFetcher]', async (t) => { ) try { await fetcher.store([]) - st.fail('fetcher store should have errored') + assert.fail('fetcher store should have errored') } catch (err: any) { - st.equal(err.message, 'could not find parent header', 'store() threw on invalid block') + assert.equal(err.message, 'could not find parent header', 'store() threw on invalid block') const { destroyFetcher, banPeer } = fetcher.processStoreError(err, { first: BigInt(1), count: 10, }) - st.equal(destroyFetcher, false, 'fetcher should not be destroyed on this error') - st.equal(banPeer, true, 
'peer should be banned on this error') + assert.equal(destroyFetcher, false, 'fetcher should not be destroyed on this error') + assert.equal(banPeer, true, 'peer should be banned on this error') } td.reset() chain.putBlocks = td.func() td.when(chain.putBlocks(td.matchers.anything())).thenResolve(1) config.events.on(Event.SYNC_FETCHED_BLOCKS, () => - st.pass('store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') + assert.ok(true, 'store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') ) await fetcher.store([]) }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts b/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts index 0f41bf92d4..fc8d26e9ce 100644 --- a/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts @@ -1,8 +1,9 @@ import { RLP } from '@ethereumjs/rlp' import { Trie } from '@ethereumjs/trie' -import { hexToBytes, utf8ToBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { hexToBytes } from '@ethereumjs/util' +import { utf8ToBytes } from 'ethereum-cryptography/utils' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain' import { Config } from '../../../src/config' @@ -14,7 +15,7 @@ import { _accountRangeRLP } from './accountfetcher.spec' const _byteCodesRLP = '0xf89e1af89b9e60806040526004361061003f5760003560e01c806301ffc9a714610044579e60806040526004361061003f5760003560e01c806301ffc9a714610044589e60806040526004361061003f5760003560e01c806301ffc9a714610044599e60806040526004361061003f5760003560e01c806301ffc9a714610044609e60806040526004361061003f5760003560e01c806301ffc9a71461004461' -tape('[ByteCodeFetcher]', async (t) => { +describe('[ByteCodeFetcher]', async () => { class PeerPool { idle() {} ban() {} @@ -24,7 +25,7 @@ tape('[ByteCodeFetcher]', async (t) => { const { 
ByteCodeFetcher } = await import('../../../src/sync/fetcher/bytecodefetcher') - t.test('should start/stop', async (t) => { + it('should start/stop', async () => { const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const fetcher = new ByteCodeFetcher({ @@ -34,27 +35,26 @@ tape('[ByteCodeFetcher]', async (t) => { hashes: [hexToBytes('0x2034f79e0e33b0ae6bef948532021baceb116adf2616478703bec6b17329f1cc')], }) fetcher.next = () => false - t.notOk((fetcher as any).running, 'not started') - t.equals((fetcher as any).in.length, 0, 'No jobs have yet been added') - t.equal((fetcher as any).hashes.length, 1, 'one codehash have been added') + assert.notOk((fetcher as any).running, 'not started') + assert.equal((fetcher as any).in.length, 0, 'No jobs have yet been added') + assert.equal((fetcher as any).hashes.length, 1, 'one codehash have been added') fetcher.enqueueByByteCodeRequestList([ hexToBytes('0x2034f79e0e33b0ae6bef948532021baceb116adf2616478703bec6b17329f1cc'), ]) - t.equals((fetcher as any).in.length, 1, 'A new task has been queued') + assert.equal((fetcher as any).in.length, 1, 'A new task has been queued') const job = (fetcher as any).in.peek() - t.equal(job!.task.hashes.length, 2, 'two storageRequests are added to job') + assert.equal(job!.task.hashes.length, 2, 'two storageRequests are added to job') void fetcher.fetch() await wait(100) - t.ok((fetcher as any).running, 'started') - t.ok(fetcher.write() === false, 'fetcher should not setup a new write pipe') + assert.ok((fetcher as any).running, 'started') + assert.ok(fetcher.write() === false, 'fetcher should not setup a new write pipe') fetcher.destroy() await wait(100) - t.notOk((fetcher as any).running, 'stopped') - t.end() + assert.notOk((fetcher as any).running, 'stopped') }) - t.test('should process', (t) => { + it('should process', () => { const config = new Config({ transports: [] }) const pool = new PeerPool() as any const fetcher = new ByteCodeFetcher({ @@ 
-74,12 +74,15 @@ tape('[ByteCodeFetcher]', async (t) => { ;(fetcher as any).running = true fetcher.enqueueTask(task) const job = (fetcher as any).in.peek() - t.deepEquals((fetcher.process(job, ByteCodeResponse) as any)[0], fullResult[0], 'got results') - t.notOk(fetcher.process({} as any, { ByteCodeResponse: [] } as any), 'bad results') - t.end() + assert.deepEqual( + (fetcher.process(job, ByteCodeResponse) as any)[0], + fullResult[0], + 'got results' + ) + assert.notOk(fetcher.process({} as any, { ByteCodeResponse: [] } as any), 'bad results') }) - t.test('should adopt correctly', (t) => { + it('should adopt correctly', () => { const config = new Config({ transports: [] }) const pool = new PeerPool() as any const fetcher = new ByteCodeFetcher({ @@ -97,17 +100,16 @@ tape('[ByteCodeFetcher]', async (t) => { fetcher.enqueueTask(task) const job = (fetcher as any).in.peek() let results = fetcher.process(job as any, ByteCodeResponse) - t.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') - t.equal(job?.partialResult.length, 2, 'Should have two partial results') - t.equal(results, undefined, 'Process should not return full results yet') + assert.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') + assert.equal(job?.partialResult.length, 2, 'Should have two partial results') + assert.equal(results, undefined, 'Process should not return full results yet') const remainingBytesCodeData: any = [utf8ToBytes(''), utf8ToBytes(''), utf8ToBytes('')] remainingBytesCodeData.completed = true results = fetcher.process(job as any, remainingBytesCodeData) - t.equal((results as any).length, 5, 'Should return full results') - t.end() + assert.equal((results as any).length, 5, 'Should return full results') }) - t.test('should request correctly', async (t) => { + it('should request correctly', async () => { const config = new Config({ transports: [] }) const chain = await Chain.create({ config }) const pool = new PeerPool() as any @@ -149,20 
+151,18 @@ tape('[ByteCodeFetcher]', async (t) => { bytes: BigInt(50000), }) ) - t.ok(results?.completed === true, 'response processed and matched properly') - t.equal((results![0] as any).size, 5, 'matched code in the response') + assert.ok(results?.completed === true, 'response processed and matched properly') + assert.equal((results![0] as any).size, 5, 'matched code in the response') try { await fetcher.store(results! as any) - t.pass('fetcher stored results successfully') + assert.ok(true, 'fetcher stored results successfully') } catch (e) { - t.fail(`fetcher failed to store results, Error: ${(e as Error).message}`) + assert.fail(`fetcher failed to store results, Error: ${(e as Error).message}`) } - - t.end() }) - t.test('should find a fetchable peer', async (t) => { + it('should find a fetchable peer', async () => { const config = new Config({ transports: [] }) const pool = new PeerPool() as any const fetcher = new ByteCodeFetcher({ @@ -172,12 +172,10 @@ tape('[ByteCodeFetcher]', async (t) => { hashes: [utf8ToBytes('')], }) td.when((fetcher as any).pool.idle(td.matchers.anything())).thenReturn('peer0') - t.equals(fetcher.peer(), 'peer0', 'found peer') - t.end() + assert.equal(fetcher.peer(), 'peer0' as any, 'found peer') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/fetcher/fetcher.spec.ts b/packages/client/test/sync/fetcher/fetcher.spec.ts index a7d990026d..00cae0938e 100644 --- a/packages/client/test/sync/fetcher/fetcher.spec.ts +++ b/packages/client/test/sync/fetcher/fetcher.spec.ts @@ -1,5 +1,5 @@ -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Config } from '../../../src/config' import { Fetcher } from '../../../src/sync/fetcher/fetcher' @@ -24,9 +24,8 @@ class FetcherTest extends Fetcher { } } -tape('[Fetcher]', (t) => { - t.test('should handle bad result', (t) => { - t.plan(2) 
+describe('[Fetcher]', () => { + it('should handle bad result', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const fetcher = new FetcherTest({ config, pool: td.object() }) const job: any = { peer: {}, state: 'active' } @@ -35,24 +34,24 @@ tape('[Fetcher]', (t) => { fetcher.wait = td.func() td.when(fetcher.wait()).thenResolve(undefined) ;(fetcher as any).success(job, undefined) - t.equals((fetcher as any).in.length, 1, 'enqueued job') - setTimeout(() => t.ok(job.peer.idle, 'peer idled'), 10) + assert.equal((fetcher as any).in.length, 1, 'enqueued job') + setTimeout(() => assert.ok(job.peer.idle, 'peer idled'), 10) }) - t.test('should handle failure', (t) => { - t.plan(2) + it('should handle failure', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const fetcher = new FetcherTest({ config, pool: td.object() }) const job = { peer: {}, state: 'active' } ;(fetcher as any).running = true fetcher.next = td.func() - config.events.on(Event.SYNC_FETCHER_ERROR, (err) => t.equals(err.message, 'err0', 'got error')) + config.events.on(Event.SYNC_FETCHER_ERROR, (err) => + assert.equal(err.message, 'err0', 'got error') + ) ;(fetcher as any).failure(job as Job, new Error('err0')) - t.equals((fetcher as any).in.length, 1, 'enqueued job') + assert.equal((fetcher as any).in.length, 1, 'enqueued job') }) - t.test('should handle expiration', (t) => { - t.plan(2) + it('should handle expiration', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const fetcher = new FetcherTest({ config, @@ -74,13 +73,12 @@ tape('[Fetcher]', (t) => { ;(fetcher as any).total = 10 fetcher.next() setTimeout(() => { - t.deepEquals(job, { index: 0, peer: { idle: false }, state: 'expired' }, 'expired job') - t.equals((fetcher as any).in.length, 1, 'enqueued job') + assert.deepEqual(job, { index: 0, peer: { idle: false }, state: 'expired' }, 'expired job') + 
assert.equal((fetcher as any).in.length, 1, 'enqueued job') }, 20) }) - t.test('should handle queue management', (t) => { - t.plan(3) + it('should handle queue management', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const fetcher = new FetcherTest({ config, @@ -92,9 +90,9 @@ tape('[Fetcher]', (t) => { ;(fetcher as any).in.insert(job1) ;(fetcher as any).in.insert(job2) ;(fetcher as any).in.insert(job3) - t.equals((fetcher as any).in.length, 3, 'queue filled') + assert.equal((fetcher as any).in.length, 3, 'queue filled') fetcher.clear() - t.equals((fetcher as any).in.length, 0, 'queue cleared') + assert.equal((fetcher as any).in.length, 0, 'queue cleared') const job4 = { index: 3 } const job5 = { index: 4 } @@ -104,11 +102,10 @@ tape('[Fetcher]', (t) => { ;(fetcher as any).in.insert(job4) ;(fetcher as any).in.insert(job5) - t.ok(fetcher.next() === false, 'next() fails when heap length exceeds maxQueue') + assert.ok(fetcher.next() === false, 'next() fails when heap length exceeds maxQueue') }) - t.test('should re-enqueue on a non-fatal error', (t) => { - t.plan(1) + it('should re-enqueue on a non-fatal error', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const fetcher = new FetcherTest({ config, pool: td.object(), timeout: 5000 }) const task = { first: BigInt(50), count: 10 } @@ -128,28 +125,26 @@ tape('[Fetcher]', (t) => { }) ;(fetcher as any).success(job, ['something']) setTimeout(() => { - t.ok( + assert.ok( (fetcher as any).in.peek().task.first === BigInt(1), 'should step back for safeReorgDistance' ) }, 20) }) - t.test('should reset td', (st) => { + it('should reset td', () => { td.reset() - st.end() }) - t.test('should handle fatal errors correctly', (st) => { + it('should handle fatal errors correctly', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const fetcher = new FetcherTest({ config, pool: td.object(), 
timeout: 5000 }) const task = { first: BigInt(50), count: 10 } const job: any = { peer: {}, task, state: 'active', index: 0 } ;(fetcher as any).in.insert(job) fetcher.error({ name: 'VeryBadError', message: 'Something very bad happened' }, job, true) - st.equals(fetcher.syncErrored?.name, 'VeryBadError', 'fatal error has correct name') - st.equals((fetcher as any).in.length, 0, 'fatal error clears job queue') + assert.equal(fetcher.syncErrored?.name, 'VeryBadError', 'fatal error has correct name') + assert.equal((fetcher as any).in.length, 0, 'fatal error clears job queue') fetcher.clear() - st.end() }) }) diff --git a/packages/client/test/sync/fetcher/headerfetcher.spec.ts b/packages/client/test/sync/fetcher/headerfetcher.spec.ts index 5f380ce0d0..ddfcd9c217 100644 --- a/packages/client/test/sync/fetcher/headerfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/headerfetcher.spec.ts @@ -1,11 +1,11 @@ -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain' import { Config } from '../../../src/config' import { Event } from '../../../src/types' -tape('[HeaderFetcher]', async (t) => { +describe('[HeaderFetcher]', async () => { class PeerPool { idle() {} ban() {} @@ -16,25 +16,27 @@ tape('[HeaderFetcher]', async (t) => { const { HeaderFetcher } = await import('../../../src/sync/fetcher/headerfetcher') - t.test('should process', (t) => { + it('should process', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() const flow = td.object() const fetcher = new HeaderFetcher({ config, pool, flow }) const headers = [{ number: 1 }, { number: 2 }] - t.deepEquals( + assert.deepEqual( //@ts-ignore fetcher.process({ task: { count: 2 }, peer: 'peer0' }, { headers, bv: BigInt(1) }), headers, 'got results' ) //@ts-ignore - t.notOk(fetcher.process({ task: { count: 2 } }, { headers: [], bv: BigInt(1) }), 'bad 
results') + assert.notOk( + fetcher.process({ task: { count: 2 } }, { headers: [], bv: BigInt(1) }), + 'bad results' + ) td.verify((fetcher as any).flow.handleReply('peer0', 1)) - t.end() }) - t.test('should adopt correctly', (t) => { + it('should adopt correctly', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const flow = td.object() @@ -50,27 +52,24 @@ tape('[HeaderFetcher]', async (t) => { const job = (fetcher as any).in.peek() let results = fetcher.process(job as any, { headers, bv: BigInt(1) } as any) - t.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') - t.equal(job?.partialResult?.length, 2, 'Should have two partial results') - t.equal(results, undefined, 'Process should not return full results yet') + assert.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') + assert.equal(job?.partialResult?.length, 2, 'Should have two partial results') + assert.equal(results, undefined, 'Process should not return full results yet') const remainingHeaders: any = [{ number: 3 }] results = fetcher.process(job as any, { headers: remainingHeaders, bv: BigInt(1) } as any) - t.equal(results?.length, 3, 'Should return full results') - - t.end() + assert.equal(results?.length, 3, 'Should return full results') }) - t.test('should find a fetchable peer', async (t) => { + it('should find a fetchable peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() const fetcher = new HeaderFetcher({ config, pool }) td.when((fetcher as any).pool.idle(td.matchers.anything())).thenReturn('peer0') - t.equal(fetcher.peer(), 'peer0', 'found peer') - t.end() + assert.equal(fetcher.peer(), 'peer0', 'found peer') }) - t.test('should request correctly', async (t) => { + it('should request correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 
1000 }) const pool = new PeerPool() as any const flow = td.object() @@ -95,12 +94,9 @@ tape('[HeaderFetcher]', async (t) => { reverse: false, }) ) - t.end() }) - t.test('store()', async (st) => { - st.plan(2) - + it('store()', async () => { const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -117,17 +113,16 @@ tape('[HeaderFetcher]', async (t) => { try { await fetcher.store([0 as any]) } catch (err: any) { - st.equal(err.message, 'err0', 'store() threw on invalid header') + assert.equal(err.message, 'err0', 'store() threw on invalid header') } td.when(chain.putHeaders([1 as any])).thenResolve(1) config.events.on(Event.SYNC_FETCHED_HEADERS, () => - st.pass('store() emitted SYNC_FETCHED_HEADERS event on putting headers') + assert.ok(true, 'store() emitted SYNC_FETCHED_HEADERS event on putting headers') ) await fetcher.store([1 as any]) }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts index 33d8287195..9f1f683ac7 100644 --- a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts @@ -1,7 +1,7 @@ import { Block } from '@ethereumjs/block' import { MemoryLevel } from 'memory-level' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain/chain' import { Config } from '../../../src/config' @@ -9,7 +9,7 @@ import { Skeleton } from '../../../src/sync' import { Event } from '../../../src/types' import { wait } from '../../integration/util' -tape('[ReverseBlockFetcher]', async (t) => { +describe('[ReverseBlockFetcher]', async () => { class PeerPool { idle() {} ban() {} @@ -19,7 +19,7 @@ tape('[ReverseBlockFetcher]', async (t) 
=> { const { ReverseBlockFetcher } = await import('../../../src/sync/fetcher/reverseblockfetcher') - t.test('should start/stop', async (t) => { + it('should start/stop', async () => { const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -34,20 +34,19 @@ tape('[ReverseBlockFetcher]', async (t) => { timeout: 5, }) fetcher.next = () => false - t.notOk((fetcher as any).running, 'not started') + assert.notOk((fetcher as any).running, 'not started') void fetcher.fetch() - t.equals((fetcher as any).in.length, 3, 'added 2 tasks') + assert.equal((fetcher as any).in.length, 3, 'added 2 tasks') await wait(100) - t.ok((fetcher as any).running, 'started') - t.equals(fetcher.first, BigInt(14), 'pending tasks first tracking should be reduced') - t.equals(fetcher.count, BigInt(0), 'pending tasks count should be reduced') + assert.ok((fetcher as any).running, 'started') + assert.equal(fetcher.first, BigInt(14), 'pending tasks first tracking should be reduced') + assert.equal(fetcher.count, BigInt(0), 'pending tasks count should be reduced') fetcher.destroy() await wait(100) - t.notOk((fetcher as any).running, 'stopped') - t.end() + assert.notOk((fetcher as any).running, 'stopped') }) - t.test('should generate max tasks', async (t) => { + it('should generate max tasks', async () => { const config = new Config({ maxPerRequest: 5, maxFetcherJobs: 10, transports: [] }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -62,20 +61,19 @@ tape('[ReverseBlockFetcher]', async (t) => { timeout: 5, }) fetcher.next = () => false - t.notOk((fetcher as any).running, 'not started') + assert.notOk((fetcher as any).running, 'not started') void fetcher.fetch() - t.equals((fetcher as any).in.length, 10, 'added max 10 tasks') + assert.equal((fetcher as any).in.length, 10, 'added max 10 tasks') await wait(100) - t.ok((fetcher as any).running, 'started') - t.equals(fetcher.first, 
BigInt(6), 'pending tasks first tracking should be by maximum') - t.equals(fetcher.count, BigInt(3), 'pending tasks count should be reduced by maximum') + assert.ok((fetcher as any).running, 'started') + assert.equal(fetcher.first, BigInt(6), 'pending tasks first tracking should be by maximum') + assert.equal(fetcher.count, BigInt(3), 'pending tasks count should be reduced by maximum') fetcher.destroy() await wait(100) - t.notOk((fetcher as any).running, 'stopped') - t.end() + assert.notOk((fetcher as any).running, 'stopped') }) - t.test('should process', async (t) => { + it('should process', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -89,12 +87,14 @@ tape('[ReverseBlockFetcher]', async (t) => { count: BigInt(10), }) const blocks: any = [{ header: { number: 2 } }, { header: { number: 1 } }] - t.deepEquals(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') - t.notOk(fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), 'bad results') - t.end() + assert.deepEqual(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') + assert.notOk( + fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), + 'bad results' + ) }) - t.test('should adopt correctly', async (t) => { + it('should adopt correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -113,18 +113,16 @@ tape('[ReverseBlockFetcher]', async (t) => { fetcher.enqueueTask(task) const job = (fetcher as any).in.peek() let results = fetcher.process(job as any, blocks) - t.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') - t.equal(job?.partialResult?.length, 2, 'Should have two partial results') - t.equal(results, undefined, 'Process should 
not return full results yet') + assert.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') + assert.equal(job?.partialResult?.length, 2, 'Should have two partial results') + assert.equal(results, undefined, 'Process should not return full results yet') const remainingBlocks: any = [{ header: { number: 1 } }] results = fetcher.process(job as any, remainingBlocks) - t.equal(results?.length, 3, 'Should return full results') - - t.end() + assert.equal(results?.length, 3, 'Should return full results') }) - t.test('should find a fetchable peer', async (t) => { + it('should find a fetchable peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -138,11 +136,10 @@ tape('[ReverseBlockFetcher]', async (t) => { count: BigInt(2), }) td.when((fetcher as any).pool.idle(td.matchers.anything())).thenReturn('peer0') - t.equals(fetcher.peer(), 'peer0', 'found peer') - t.end() + assert.equal(fetcher.peer(), 'peer0', 'found peer') }) - t.test('should request correctly', async (t) => { + it('should request correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -172,13 +169,10 @@ tape('[ReverseBlockFetcher]', async (t) => { reverse: true, }) ) - t.end() }) - t.test('store()', async (st) => { + it('store()', async () => { td.reset() - st.plan(4) - const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -198,9 +192,9 @@ tape('[ReverseBlockFetcher]', async (t) => { ) try { await fetcher.store([]) - st.fail('fetcher store should have errored') + assert.fail('fetcher store should have errored') } catch (err: any) { - st.ok( + assert.ok( err.message === `Blocks don't extend canonical subchain`, 'store() threw 
on invalid block' ) @@ -208,19 +202,19 @@ tape('[ReverseBlockFetcher]', async (t) => { first: BigInt(10), count: 10, }) - st.equal(destroyFetcher, false, 'fetcher should not be destroyed on this error') - st.equal(banPeer, true, 'peer should be banned on this error') + assert.equal(destroyFetcher, false, 'fetcher should not be destroyed on this error') + assert.equal(banPeer, true, 'peer should be banned on this error') } td.reset() skeleton.putBlocks = td.func() td.when(skeleton.putBlocks(td.matchers.anything())).thenResolve(1) config.events.on(Event.SYNC_FETCHED_BLOCKS, () => - st.pass('store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') + assert.ok(true, 'store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') ) await fetcher.store([]) }) - t.test('should restart the fetcher when subchains are merged', async (st) => { + it('should restart the fetcher when subchains are merged', async () => { td.reset() const config = new Config({ transports: [], @@ -263,20 +257,21 @@ tape('[ReverseBlockFetcher]', async (t) => { ] await (skeleton as any).putBlock(block47) await fetcher.store([block49, block48]) - st.ok((skeleton as any).status.progress.subchains.length === 1, 'subchains should be merged') - st.equal( + assert.ok( + (skeleton as any).status.progress.subchains.length === 1, + 'subchains should be merged' + ) + assert.equal( (skeleton as any).status.progress.subchains[0].tail, BigInt(5), 'subchain tail should be next segment' ) - st.notOk((fetcher as any).running, 'fetcher should stop') - st.equal((fetcher as any).in.length, 0, 'fetcher in should be cleared') - st.equal((fetcher as any).out.length, 0, 'fetcher out should be cleared') - st.end() + assert.notOk((fetcher as any).running, 'fetcher should stop') + assert.equal((fetcher as any).in.length, 0, 'fetcher in should be cleared') + assert.equal((fetcher as any).out.length, 0, 'fetcher out should be cleared') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - 
t.end() }) }) diff --git a/packages/client/test/sync/fetcher/storagefetcher.spec.ts b/packages/client/test/sync/fetcher/storagefetcher.spec.ts index 53f40bda81..84a251ffbc 100644 --- a/packages/client/test/sync/fetcher/storagefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/storagefetcher.spec.ts @@ -1,7 +1,8 @@ import { RLP } from '@ethereumjs/rlp' -import { hexToBytes, utf8ToBytes } from '@ethereumjs/util' -import * as tape from 'tape' +import { hexToBytes } from '@ethereumjs/util' +import { utf8ToBytes } from 'ethereum-cryptography/utils' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain' import { Config } from '../../../src/config' @@ -13,7 +14,7 @@ import { _accountRangeRLP } from './accountfetcher.spec' const _storageRangesRLP = '0xf83e0bf83af838f7a0290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e5639594053cd080a26cb03d5e6d2956cebb31c56e7660cac0' -tape('[StorageFetcher]', async (t) => { +describe('[StorageFetcher]', async () => { class PeerPool { idle() {} ban() {} @@ -23,7 +24,7 @@ tape('[StorageFetcher]', async (t) => { const { StorageFetcher } = await import('../../../src/sync/fetcher/storagefetcher') - t.test('should start/stop', async (t) => { + it('should start/stop', async () => { const config = new Config({ maxPerRequest: 5, transports: [] }) const pool = new PeerPool() as any const fetcher = new StorageFetcher({ @@ -44,9 +45,9 @@ tape('[StorageFetcher]', async (t) => { ], }) fetcher.next = () => false - t.notOk((fetcher as any).running, 'not started') - t.equals((fetcher as any).in.length, 0, 'No jobs have yet been added') - t.equal((fetcher as any).storageRequests.length, 1, 'one storageRequests have been added') + assert.notOk((fetcher as any).running, 'not started') + assert.equal((fetcher as any).in.length, 0, 'No jobs have yet been added') + assert.equal((fetcher as any).storageRequests.length, 1, 'one storageRequests have been added') 
fetcher.enqueueByStorageRequestList([ { accountHash: hexToBytes( @@ -59,21 +60,20 @@ tape('[StorageFetcher]', async (t) => { count: BigInt(2) ** BigInt(256) - BigInt(1), }, ]) - t.equals((fetcher as any).in.length, 1, 'A new task has been queued') + assert.equal((fetcher as any).in.length, 1, 'A new task has been queued') const job = (fetcher as any).in.peek() - t.equal(job!.task.storageRequests.length, 2, 'two storageRequests are added to job') + assert.equal(job!.task.storageRequests.length, 2, 'two storageRequests are added to job') void fetcher.fetch() await wait(100) - t.ok((fetcher as any).running, 'started') - t.ok(fetcher.write() === false, 'fetcher should not setup a new write pipe') + assert.ok((fetcher as any).running, 'started') + assert.ok(fetcher.write() === false, 'fetcher should not setup a new write pipe') fetcher.destroy() await wait(100) - t.notOk((fetcher as any).running, 'stopped') - t.end() + assert.notOk((fetcher as any).running, 'stopped') }) - t.test('should process', (t) => { + it('should process', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const fetcher = new StorageFetcher({ @@ -113,16 +113,15 @@ tape('[StorageFetcher]', async (t) => { ;(fetcher as any).running = true fetcher.enqueueTask(task) const job = (fetcher as any).in.peek() - t.deepEquals( + assert.deepEqual( (fetcher.process(job, StorageDataResponse) as any)[0], fullResult[0], 'got results' ) - t.notOk(fetcher.process({} as any, { StorageDataResponse: [] } as any), 'bad results') - t.end() + assert.notOk(fetcher.process({} as any, { StorageDataResponse: [] } as any), 'bad results') }) - t.test('should adopt correctly', (t) => { + it('should adopt correctly', () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const fetcher = new StorageFetcher({ @@ -155,9 +154,9 @@ tape('[StorageFetcher]', async (t) => { 
fetcher.enqueueTask(task) const job = (fetcher as any).in.peek() let results = fetcher.process(job as any, StorageDataResponse) - t.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') - t.equal(job?.partialResult[0].length, 2, 'Should have two partial results') - t.equal(results, undefined, 'Process should not return full results yet') + assert.equal((fetcher as any).in.length, 1, 'Fetcher should still have same job') + assert.equal(job?.partialResult[0].length, 2, 'Should have two partial results') + assert.equal(results, undefined, 'Process should not return full results yet') const remainingStorageData: any = [ [ [{ hash: utf8ToBytes(''), body: utf8ToBytes('') }], @@ -167,11 +166,10 @@ tape('[StorageFetcher]', async (t) => { ] remainingStorageData.completed = true results = fetcher.process(job as any, remainingStorageData) - t.equal((results as any)[0].length, 5, 'Should return full results') - t.end() + assert.equal((results as any)[0].length, 5, 'Should return full results') }) - t.test('should request correctly', async (t) => { + it('should request correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const pool = new PeerPool() as any @@ -232,10 +230,9 @@ tape('[StorageFetcher]', async (t) => { bytes: BigInt(50000), }) ) - t.end() }) - t.test('should verify proof correctly', async (t) => { + it('should verify proof correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const pool = new PeerPool() as any @@ -285,21 +282,21 @@ tape('[StorageFetcher]', async (t) => { } const job = { peer, partialResult, task } let results = await fetcher.request(job as any) - t.ok(results !== undefined, 'Proof verification is completed without errors') + assert.ok(results !== undefined, 'Proof verification is completed without errors') 
results!.completed = true results = fetcher.process(job as any, results!) - t.ok(results !== undefined, 'Response should be processed correctly') - t.equal(results![0].length, 3, '3 results should be there with dummy partials') + assert.ok(results !== undefined, 'Response should be processed correctly') + assert.equal(results![0].length, 3, '3 results should be there with dummy partials') // remove out the dummy partials results![0].splice(0, 2) - t.equal(results![0].length, 1, 'valid slot in results') + assert.equal(results![0].length, 1, 'valid slot in results') try { await fetcher.store(results! as any) - t.pass('fetcher stored results successfully') + assert.ok(true, 'fetcher stored results successfully') } catch (e) { - t.fail(`fetcher failed to store results, Error: ${(e as Error).message}`) + assert.fail(`fetcher failed to store results, Error: ${(e as Error).message}`) } // We have not been able to captured valid storage proof yet but we can try invalid @@ -318,20 +315,21 @@ tape('[StorageFetcher]', async (t) => { slots, proof: proofInvalid, }) - t.fail('verifyRangeProof should have failed for an proofInvalid') + assert.fail('verifyRangeProof should have failed for an proofInvalid') } catch (e) { - t.pass(`verifyRangeProof correctly failed on invalid proof, Error: ${(e as Error).message}`) + assert.ok( + true, + `verifyRangeProof correctly failed on invalid proof, Error: ${(e as Error).message}` + ) } // send end of range input to store ;(fetcher as any)['destroyWhenDone'] = false await fetcher.store([Object.create(null)] as any) - t.ok(fetcher['destroyWhenDone'] === true, 'should have marked fetcher to close') - - t.end() + assert.ok(fetcher['destroyWhenDone'] === true, 'should have marked fetcher to close') }) - t.test('should find a fetchable peer', async (t) => { + it('should find a fetchable peer', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const fetcher = new 
StorageFetcher({ @@ -342,12 +340,10 @@ tape('[StorageFetcher]', async (t) => { count: BigInt(10), }) td.when((fetcher as any).pool.idle(td.matchers.anything())).thenReturn('peer0') - t.equals(fetcher.peer(), 'peer0', 'found peer') - t.end() + assert.equal(fetcher.peer(), 'peer0' as any, 'found peer') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/fullsync.spec.ts b/packages/client/test/sync/fullsync.spec.ts index bcc48dc71f..6a9f83ab0f 100644 --- a/packages/client/test/sync/fullsync.spec.ts +++ b/packages/client/test/sync/fullsync.spec.ts @@ -1,12 +1,12 @@ import { Block } from '@ethereumjs/block' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' import { Event } from '../../src/types' -tape('[FullSynchronizer]', async (t) => { +describe('[FullSynchronizer]', async () => { const txPool: any = { removeNewBlockTxs: () => {}, checkRunState: () => {} } const execution: any = { run: () => {} } class PeerPool { @@ -30,16 +30,15 @@ tape('[FullSynchronizer]', async (t) => { const { FullSynchronizer } = await import('../../src/sync/fullsync') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) const sync = new FullSynchronizer({ config, pool, chain, txPool, execution }) - t.equals(sync.type, 'full', 'full type') - t.end() + assert.equal(sync.type, 'full', 'full type') }) - t.test('should open', async (t) => { + it('should open', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -54,12 +53,11 
@@ tape('[FullSynchronizer]', async (t) => { ;(sync as any).pool.peers = [] td.when((sync as any).pool.open()).thenResolve(null) await sync.open() - t.pass('opened') + assert.ok(true, 'opened') await sync.close() - t.end() }) - t.test('should get height', async (t) => { + it('should get height', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -68,13 +66,12 @@ tape('[FullSynchronizer]', async (t) => { const headers = [{ number: BigInt(5) }] td.when(peer.eth.getBlockHeaders({ block: 'hash', max: 1 })).thenResolve([BigInt(1), headers]) const latest = await sync.latest(peer as any) - t.equal(latest!.number, BigInt(5), 'got height') + assert.equal(latest!.number, BigInt(5), 'got height') await sync.stop() await sync.close() - t.end() }) - t.test('should find best', async (t) => { + it('should find best', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -101,14 +98,12 @@ tape('[FullSynchronizer]', async (t) => { td.when((sync as any).height(peers[1])).thenDo((peer: any) => Promise.resolve(peer.eth.status.td) ) - t.equal(await sync.best(), peers[1], 'found best') + assert.equal(await sync.best(), peers[1], 'found best') await sync.stop() await sync.close() - t.end() }) - t.test('should sync', async (t) => { - t.plan(3) + it('should sync', async () => { const config = new Config({ transports: [], accountCache: 10000, @@ -134,25 +129,25 @@ tape('[FullSynchronizer]', async (t) => { }) td.when(BlockFetcher.prototype.fetch(), { delay: 20, times: 2 }).thenResolve(undefined) ;(sync as any).chain = { blocks: { height: BigInt(3) } } - t.notOk(await sync.sync(), 'local height > remote height') + assert.notOk(await sync.sync(), 'local height > remote height') ;(sync as any).chain = { blocks: { height: BigInt(0) }, } 
setTimeout(() => { config.events.emit(Event.SYNC_SYNCHRONIZED, BigInt(0)) }, 100) - t.ok(await sync.sync(), 'local height < remote height') + assert.ok(await sync.sync(), 'local height < remote height') td.when(BlockFetcher.prototype.fetch()).thenReject(new Error('err0')) try { await sync.sync() } catch (err: any) { - t.equals(err.message, 'err0', 'got error') + assert.equal(err.message, 'err0', 'got error') await sync.stop() await sync.close() } }) - t.test('should send NewBlock/NewBlockHashes to right peers', async (t) => { + it('should send NewBlock/NewBlockHashes to right peers', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -166,9 +161,9 @@ tape('[FullSynchronizer]', async (t) => { }) ;(sync as any)._fetcher = { enqueueByNumberList: (blockNumberList: bigint[], min: bigint) => { - t.equal(blockNumberList[0], BigInt(0), 'enqueueing the correct block in the Fetcher') - t.equal(blockNumberList.length, 1, 'correct number of blocks enqueued in Fetcher') - t.equal(min, BigInt(0), 'correct start block number in Fetcher') + assert.equal(blockNumberList[0], BigInt(0), 'enqueueing the correct block in the Fetcher') + assert.equal(blockNumberList.length, 1, 'correct number of blocks enqueued in Fetcher') + assert.equal(min, BigInt(0), 'correct start block number in Fetcher') }, } Object.defineProperty(sync, 'fetcher', { @@ -184,7 +179,7 @@ tape('[FullSynchronizer]', async (t) => { eth: { status: { td: BigInt(1) }, send(name: string) { - t.equal(name, 'NewBlock', 'sent NewBlock to Peer 1') + assert.equal(name, 'NewBlock', 'sent NewBlock to Peer 1') }, }, inbound: false, @@ -194,7 +189,7 @@ tape('[FullSynchronizer]', async (t) => { eth: { status: { td: BigInt(2) }, send(name: string) { - t.equal(name, 'NewBlockHashes', 'sent NewBlockHashes to Peer 2') + assert.equal(name, 'NewBlockHashes', 'sent NewBlockHashes to Peer 2') 
timesSentToPeer2++ }, }, @@ -205,7 +200,7 @@ tape('[FullSynchronizer]', async (t) => { eth: { status: { td: BigInt(3) }, send() { - t.fail('should not send announcement to peer3') + assert.fail('should not send announcement to peer3') }, }, inbound: false, @@ -226,10 +221,10 @@ tape('[FullSynchronizer]', async (t) => { // NewBlock message from Peer 3 await sync.handleNewBlock(newBlock, peers[2] as any) - t.equal(config.syncTargetHeight, BigInt(0), 'sync target height should be set to 0') + assert.equal(config.syncTargetHeight, BigInt(0), 'sync target height should be set to 0') await sync.handleNewBlock(newBlock) - t.equal(timesSentToPeer2, 1, 'sent NewBlockHashes to Peer 2 once') - t.pass('did not send NewBlock to Peer 3') + assert.equal(timesSentToPeer2, 1, 'sent NewBlockHashes to Peer 2 once') + assert.ok(true, 'did not send NewBlock to Peer 3') ;(sync as any).chain._blocks = { latest: chainTip, } @@ -238,7 +233,7 @@ tape('[FullSynchronizer]', async (t) => { td.verify(chain.putBlocks([newBlock])) }) - t.test('should process blocks', async (t) => { + it('should process blocks', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -261,11 +256,10 @@ tape('[FullSynchronizer]', async (t) => { }) sync.running = true - t.ok(await sync.processBlocks([newBlock]), 'should successfully process blocks') + assert.ok(await sync.processBlocks([newBlock]), 'should successfully process blocks') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/lightsync.spec.ts b/packages/client/test/sync/lightsync.spec.ts index 25fafd780c..b488680198 100644 --- a/packages/client/test/sync/lightsync.spec.ts +++ b/packages/client/test/sync/lightsync.spec.ts @@ -1,38 +1,37 @@ import { BlockHeader } from '@ethereumjs/block' -import * as tape from 'tape' import * as td from 
'testdouble' +import { assert, describe, it, vi } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' +import { HeaderFetcher } from '../../src/sync/fetcher/headerfetcher' import { Event } from '../../src/types' -tape('[LightSynchronizer]', async (t) => { +describe('[LightSynchronizer]', async () => { class PeerPool { open() {} close() {} + idle() {} } PeerPool.prototype.open = td.func() PeerPool.prototype.close = td.func() - class HeaderFetcher { - fetch() {} - clear() {} - destroy() {} - } + HeaderFetcher.prototype.fetch = td.func() - td.replace('../../src/sync/fetcher', { HeaderFetcher }) + HeaderFetcher.prototype.clear = td.func() + HeaderFetcher.prototype.destroy = td.func() + vi.mock('../../src/sync/fetcher/headerfetcher', () => td.object()) const { LightSynchronizer } = await import('../../src/sync/lightsync') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) const sync = new LightSynchronizer({ config, pool, chain }) - t.equals(sync.type, 'light', 'light type') - t.end() + assert.equal(sync.type, 'light', 'light type') }) - t.test('should find best', async (t) => { + it('should find best', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -42,7 +41,7 @@ tape('[LightSynchronizer]', async (t) => { pool, chain, }) - ;(sync as any).running = true + sync['running'] = true ;(sync as any).chain = { headers: { td: BigInt(1) } } const peers = [ { @@ -56,12 +55,10 @@ tape('[LightSynchronizer]', async (t) => { ] ;(sync as any).pool = { peers } ;(sync as any).forceSync = true - t.equal(await sync.best(), peers[1], 'found best') - t.end() + assert.equal(await sync.best(), 
peers[1], 'found best') }) - t.test('should sync', async (t) => { - t.plan(3) + it('should sync', async () => { const config = new Config({ transports: [], accountCache: 10000, @@ -85,25 +82,30 @@ tape('[LightSynchronizer]', async (t) => { }) td.when(HeaderFetcher.prototype.fetch(), { delay: 20, times: 2 }).thenResolve(undefined) ;(sync as any).chain = { headers: { height: BigInt(3) } } - t.notOk(await sync.sync(), 'local height > remote height') + assert.notOk(await sync.sync(), 'local height > remote height') ;(sync as any).chain = { headers: { height: BigInt(0) } } setTimeout(() => { config.events.emit(Event.SYNC_SYNCHRONIZED, BigInt(0)) }, 100) - t.ok(await sync.sync(), 'local height < remote height') + assert.ok(await sync.sync(), 'local height < remote height') td.when(HeaderFetcher.prototype.fetch()).thenReject(new Error('err0')) try { await sync.sync() } catch (err: any) { - t.equals(err.message, 'err0', 'got error') + assert.equal(err.message, 'err0', 'got error') await sync.stop() await sync.close() + vi.unmock('../../src/sync/fetcher/headerfetcher') } }) - t.test('import headers', async (st) => { + it('import headers', async () => { td.reset() - st.plan(1) + HeaderFetcher.prototype.fetch = td.func() + HeaderFetcher.prototype.clear = td.func() + HeaderFetcher.prototype.destroy = td.func() + vi.mock('../../src/sync/fetcher/headerfetcher', () => td.object()) + const { LightSynchronizer } = await import('../../src/sync/lightsync') const config = new Config({ transports: [], accountCache: 10000, @@ -131,18 +133,18 @@ tape('[LightSynchronizer]', async (t) => { ) config.logger.on('data', async (data) => { if ((data.message as string).includes('Imported headers count=1')) { - st.pass('successfully imported new header') + assert.ok(true, 'successfully imported new header') config.logger.removeAllListeners() await sync.stop() await sync.close() + vi.unmock('../../src/sync/fetcher/headerfetcher') } }) await sync.sync() }) - t.test('sync errors', async (st) => { + 
it('sync errors', async () => { td.reset() - st.plan(1) const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -165,7 +167,7 @@ tape('[LightSynchronizer]', async (t) => { ) config.logger.on('data', async (data) => { if ((data.message as string).includes('No headers fetched are applicable for import')) { - st.pass('generated correct warning message when no headers received') + assert.ok(true, 'generated correct warning message when no headers received') config.logger.removeAllListeners() await sync.stop() await sync.close() diff --git a/packages/client/test/sync/skeleton.spec.ts b/packages/client/test/sync/skeleton.spec.ts index 5512dbe9a7..b161d8d554 100644 --- a/packages/client/test/sync/skeleton.spec.ts +++ b/packages/client/test/sync/skeleton.spec.ts @@ -2,8 +2,8 @@ import { Block, BlockHeader } from '@ethereumjs/block' import { Common } from '@ethereumjs/common' import { equalsBytes, utf8ToBytes } from '@ethereumjs/util' import { MemoryLevel } from 'memory-level' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' @@ -11,7 +11,7 @@ import { getLogger } from '../../src/logging' import { Skeleton, errReorgDenied, errSyncMerged } from '../../src/sync/skeleton' import { short } from '../../src/util' import { wait } from '../integration/util' -import * as genesisJSON from '../testdata/geth-genesis/post-merge.json' +import genesisJSON from '../testdata/geth-genesis/post-merge.json' type Subchain = { head: bigint tail: bigint @@ -36,7 +36,7 @@ const block51 = Block.fromBlockData( { common } ) -tape('[Skeleton] / initSync', async (t) => { +describe('[Skeleton] / initSync', async () => { // Tests various sync initializations based on previous leftovers in the database // and announced heads. 
interface TestCase { @@ -190,7 +190,7 @@ tape('[Skeleton] / initSync', async (t) => { }, ] for (const [testCaseIndex, testCase] of testCases.entries()) { - t.test(`${testCase.name}`, async (st) => { + it(`${testCase.name}`, async () => { const config = new Config({ common, transports: [], @@ -214,27 +214,27 @@ tape('[Skeleton] / initSync', async (t) => { const { progress } = (skeleton as any).status if (progress.subchains.length !== testCase.newState.length) { - st.fail( + assert.fail( `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}` ) } for (const [i, subchain] of progress.subchains.entries()) { if (subchain.head !== testCase.newState[i].head) { - st.fail( + assert.fail( `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}` ) } else if (subchain.tail !== testCase.newState[i].tail) { - st.fail( + assert.fail( `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}` ) } else { - st.pass(`test ${testCaseIndex}: subchain[${i}] matched`) + assert.ok(true, `test ${testCaseIndex}: subchain[${i}] matched`) } } }) } }) -tape('[Skeleton] / setHead', async (t) => { +describe('[Skeleton] / setHead', async () => { // Tests that a running skeleton sync can be extended with properly linked up // headers but not with side chains. interface TestCase { @@ -306,7 +306,7 @@ tape('[Skeleton] / setHead', async (t) => { }, ] for (const [testCaseIndex, testCase] of testCases.entries()) { - t.test(`${testCase.name}`, async (st) => { + it(`${testCase.name}`, async () => { const config = new Config({ common, transports: [], @@ -326,18 +326,18 @@ tape('[Skeleton] / setHead', async (t) => { try { await skeleton.setHead(testCase.extend, testCase.force ?? 
false, false, true) if (testCase.err) { - st.fail(`test ${testCaseIndex}: should have failed`) + assert.fail(`test ${testCaseIndex}: should have failed`) } else { - st.pass(`test ${testCaseIndex}: successfully passed`) + assert.ok(true, `test ${testCaseIndex}: successfully passed`) } } catch (error: any) { if ( typeof testCase.err?.message === 'string' && (error.message as string).includes(testCase.err.message) ) { - st.pass(`test ${testCaseIndex}: passed with correct error`) + assert.ok(true, `test ${testCaseIndex}: passed with correct error`) } else { - st.fail( + assert.fail( `test ${testCaseIndex}: received wrong error expected=${testCase.err?.message} actual=${error.message}` ) } @@ -345,28 +345,27 @@ tape('[Skeleton] / setHead', async (t) => { const { progress } = (skeleton as any).status if (progress.subchains.length !== testCase.newState.length) { - st.fail( + assert.fail( `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}` ) } for (const [i, subchain] of progress.subchains.entries()) { if (subchain.head !== testCase.newState[i].head) { - st.fail( + assert.fail( `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}` ) } else if (subchain.tail !== testCase.newState[i].tail) { - st.fail( + assert.fail( `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}` ) } else { - st.pass(`test ${testCaseIndex}: subchain[${i}] matched`) + assert.ok(true, `test ${testCaseIndex}: subchain[${i}] matched`) } } }) } - t.test(`skeleton init should throw error if merge not set`, async (st) => { - st.plan(1) + it(`skeleton init should throw error if merge not set`, async () => { const genesis = { ...genesisJSON, config: { @@ -386,12 +385,11 @@ tape('[Skeleton] / setHead', async (t) => { try { new Skeleton({ chain, config, metaDB: new MemoryLevel() }) } catch (e) { - st.pass(`Skeleton init should error if merge not 
set`) + assert.ok(true, `Skeleton init should error if merge not set`) } - st.end() }) - t.test('should init/setHead properly from genesis', async (st) => { + it('should init/setHead properly from genesis', async () => { const config = new Config({ common, transports: [] }) const chain = await Chain.create({ config }) ;(chain.blockchain as any)._validateBlocks = false @@ -416,86 +414,86 @@ tape('[Skeleton] / setHead', async (t) => { let reorg reorg = await skeleton.initSync(genesis) - st.equal(reorg, false, 'should not reorg on genesis init') + assert.equal(reorg, false, 'should not reorg on genesis init') reorg = await skeleton.setHead(genesis, false) - st.equal(reorg, false, 'should not reorg on genesis announcement') + assert.equal(reorg, false, 'should not reorg on genesis announcement') reorg = await skeleton.setHead(genesis, true) - st.equal(reorg, false, 'should not reorg on genesis setHead') + assert.equal(reorg, false, 'should not reorg on genesis setHead') - st.equal( + assert.equal( (skeleton as any).status.progress.subchains.length, 0, 'no subchain should have been created' ) try { await skeleton.putBlocks([block1]) - st.fail('should have not allowed putBlocks since no subchain set') + assert.fail('should have not allowed putBlocks since no subchain set') } catch (_e) { - st.pass('should not allow putBlocks since no subchain set') + assert.ok(true, 'should not allow putBlocks since no subchain set') } - st.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') + assert.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') reorg = await skeleton.setHead(block1, false) - st.equal(reorg, false, 'should not reorg on valid first block') - st.equal( + assert.equal(reorg, false, 'should not reorg on valid first block') + assert.equal( (skeleton as any).status.progress.subchains.length, 0, 'no subchain should have been created' ) reorg = await skeleton.setHead(block1, true) - st.equal(reorg, false, 'should not 
reorg on valid first block') - st.equal( + assert.equal(reorg, false, 'should not reorg on valid first block') + assert.equal( (skeleton as any).status.progress.subchains.length, 1, 'subchain should have been created' ) - st.equal( + assert.equal( (skeleton as any).status.progress.subchains[0].head, BigInt(1), 'head should be set to first block' ) - st.equal(skeleton.isLinked(), true, 'subchain status should be linked') + assert.equal(skeleton.isLinked(), true, 'subchain status should be linked') reorg = await skeleton.setHead(block2, true) - st.equal(reorg, false, 'should not reorg on valid second block') - st.equal((skeleton as any).status.progress.subchains.length, 1, 'subchain should be same') - st.equal( + assert.equal(reorg, false, 'should not reorg on valid second block') + assert.equal((skeleton as any).status.progress.subchains.length, 1, 'subchain should be same') + assert.equal( (skeleton as any).status.progress.subchains[0].head, BigInt(2), 'head should be set to first block' ) - st.equal(skeleton.isLinked(), true, 'subchain status should stay linked') + assert.equal(skeleton.isLinked(), true, 'subchain status should stay linked') reorg = await skeleton.setHead(block3, false) - st.equal(reorg, true, 'should not extend on invalid third block') + assert.equal(reorg, true, 'should not extend on invalid third block') // since its not a forced update so shouldn't affect subchain status - st.equal((skeleton as any).status.progress.subchains.length, 1, 'subchain should be same') - st.equal( + assert.equal((skeleton as any).status.progress.subchains.length, 1, 'subchain should be same') + assert.equal( (skeleton as any).status.progress.subchains[0].head, BigInt(2), 'head should be set to second block' ) - st.equal(skeleton.isLinked(), true, 'subchain status should stay linked') + assert.equal(skeleton.isLinked(), true, 'subchain status should stay linked') reorg = await skeleton.setHead(block3, true) - st.equal(reorg, true, 'should not extend on invalid third 
block') + assert.equal(reorg, true, 'should not extend on invalid third block') // since its not a forced update so shouldn't affect subchain status - st.equal( + assert.equal( (skeleton as any).status.progress.subchains.length, 2, 'new subchain should be created' ) - st.equal( + assert.equal( (skeleton as any).status.progress.subchains[0].head, BigInt(3), 'head should be set to third block' ) - st.equal(skeleton.isLinked(), false, 'subchain status should not be linked anymore') + assert.equal(skeleton.isLinked(), false, 'subchain status should not be linked anymore') }) - t.test('should fill the canonical chain after being linked to genesis', async (st) => { + it('should fill the canonical chain after being linked to genesis', async () => { const config = new Config({ common, transports: [] }) const chain = await Chain.create({ config }) ;(chain.blockchain as any)._validateBlocks = false @@ -528,32 +526,36 @@ tape('[Skeleton] / setHead', async (t) => { await skeleton.initSync(block4) await skeleton.putBlocks([block3, block2]) - st.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') + assert.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') await skeleton.putBlocks([block1]) await wait(200) - st.equal(chain.blocks.height, BigInt(4), 'canonical height should update after being linked') + assert.equal( + chain.blocks.height, + BigInt(4), + 'canonical height should update after being linked' + ) await skeleton.setHead(block5, false) await wait(200) - st.equal( + assert.equal( chain.blocks.height, BigInt(4), 'canonical height should not change when setHead is set with force=false' ) await skeleton.setHead(block5, true) await wait(200) - st.equal( + assert.equal( chain.blocks.height, BigInt(5), 'canonical height should change when setHead is set with force=true' ) for (const block of [block1, block2, block3, block4, block5]) { - st.equal( + assert.equal( (await skeleton.getBlock(block.header.number, true))?.hash(), 
undefined, `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain` ) - st.equal( + assert.equal( (await skeleton.getBlockByHash(block.hash(), true))?.hash(), undefined, `skeleton block hash=${short( @@ -563,343 +565,336 @@ tape('[Skeleton] / setHead', async (t) => { } }) - t.test( - 'should fill the canonical chain after being linked to a canonical block past genesis', - async (st) => { - const config = new Config({ common, transports: [] }) - const chain = await Chain.create({ config }) - ;(chain.blockchain as any)._validateBlocks = false + it('should fill the canonical chain after being linked to a canonical block past genesis', async () => { + const config = new Config({ common, transports: [] }) + const chain = await Chain.create({ config }) + ;(chain.blockchain as any)._validateBlocks = false - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - await chain.open() - await skeleton.open() + const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) + await chain.open() + await skeleton.open() - const genesis = await chain.getBlock(BigInt(0)) + const genesis = await chain.getBlock(BigInt(0)) - const block1 = Block.fromBlockData( - { header: { number: 1, parentHash: genesis.hash(), difficulty: 100 } }, - { common, setHardfork: true } - ) - const block2 = Block.fromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common, setHardfork: true } - ) - const block3 = Block.fromBlockData( - { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common, setHardfork: true } - ) - const block4 = Block.fromBlockData( - { header: { number: 4, parentHash: block3.hash(), difficulty: 100 } }, - { common, setHardfork: true } - ) - const block5 = Block.fromBlockData( - { header: { number: 5, parentHash: block4.hash(), difficulty: 100 } }, - { common, setHardfork: true } - ) + const block1 = Block.fromBlockData( + { header: { number: 1, 
parentHash: genesis.hash(), difficulty: 100 } }, + { common, setHardfork: true } + ) + const block2 = Block.fromBlockData( + { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, + { common, setHardfork: true } + ) + const block3 = Block.fromBlockData( + { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, + { common, setHardfork: true } + ) + const block4 = Block.fromBlockData( + { header: { number: 4, parentHash: block3.hash(), difficulty: 100 } }, + { common, setHardfork: true } + ) + const block5 = Block.fromBlockData( + { header: { number: 5, parentHash: block4.hash(), difficulty: 100 } }, + { common, setHardfork: true } + ) - await chain.putBlocks([block1, block2]) - await skeleton.initSync(block4) - st.equal(chain.blocks.height, BigInt(2), 'canonical height should be at block 2') - await skeleton.putBlocks([block3]) - await wait(200) - st.equal(chain.blocks.height, BigInt(4), 'canonical height should update after being linked') - await skeleton.setHead(block5, false) - await wait(200) - st.equal( - chain.blocks.height, - BigInt(4), - 'canonical height should not change when setHead with force=false' + await chain.putBlocks([block1, block2]) + await skeleton.initSync(block4) + assert.equal(chain.blocks.height, BigInt(2), 'canonical height should be at block 2') + await skeleton.putBlocks([block3]) + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(4), + 'canonical height should update after being linked' + ) + await skeleton.setHead(block5, false) + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(4), + 'canonical height should not change when setHead with force=false' + ) + await skeleton.setHead(block5, true) + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(5), + 'canonical height should change when setHead with force=true' + ) + for (const block of [block3, block4, block5]) { + assert.equal( + (await skeleton.getBlock(block.header.number, true))?.hash(), + undefined, + 
`skeleton block number=${block.header.number} should be cleaned up after filling canonical chain` ) - await skeleton.setHead(block5, true) - await wait(200) - st.equal( - chain.blocks.height, - BigInt(5), - 'canonical height should change when setHead with force=true' + assert.equal( + (await skeleton.getBlockByHash(block.hash(), true))?.hash(), + undefined, + `skeleton block hash=${short( + block.hash() + )} should be cleaned up after filling canonical chain` ) - for (const block of [block3, block4, block5]) { - st.equal( - (await skeleton.getBlock(block.header.number, true))?.hash(), - undefined, - `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain` - ) - st.equal( - (await skeleton.getBlockByHash(block.hash(), true))?.hash(), - undefined, - `skeleton block hash=${short( - block.hash() - )} should be cleaned up after filling canonical chain` - ) - } } - ) - - t.test( - 'should abort filling the canonical chain if the terminal block is invalid', - async (st) => { - const genesis = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 200, - clique: undefined, - ethash: {}, - }, - extraData: '0x00000000000000000', - difficulty: '0x1', - } - const common = Common.fromGethGenesis(genesis, { chain: 'post-merge' }) - common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) - const config = new Config({ - transports: [], - common, - accountCache: 10000, - storageCache: 1000, - }) - const chain = await Chain.create({ config }) - ;(chain.blockchain as any)._validateBlocks = false - await chain.open() - const genesisBlock = await chain.getBlock(BigInt(0)) + }) - const block1 = Block.fromBlockData( - { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } - ) - const block2 = Block.fromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } - ) - const block3PoW = Block.fromBlockData( - { header: { number: 3, 
parentHash: block2.hash(), difficulty: 100 } }, - { common } - ) - const block3PoS = Block.fromBlockData( - { header: { number: 3, parentHash: block2.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } - ) - const block4InvalidPoS = Block.fromBlockData( - { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } - ) - const block4PoS = Block.fromBlockData( - { header: { number: 4, parentHash: block3PoS.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } - ) - const block5 = Block.fromBlockData( - { header: { number: 5, parentHash: block4PoS.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } - ) + it('should abort filling the canonical chain if the terminal block is invalid', async () => { + const genesis = { + ...genesisJSON, + config: { + ...genesisJSON.config, + terminalTotalDifficulty: 200, + clique: undefined, + ethash: {}, + }, + extraData: '0x00000000000000000', + difficulty: '0x1', + } + const common = Common.fromGethGenesis(genesis, { chain: 'post-merge' }) + common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) + const config = new Config({ + transports: [], + common, + accountCache: 10000, + storageCache: 1000, + }) + const chain = await Chain.create({ config }) + ;(chain.blockchain as any)._validateBlocks = false + await chain.open() + const genesisBlock = await chain.getBlock(BigInt(0)) - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - await skeleton.open() + const block1 = Block.fromBlockData( + { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, + { common } + ) + const block2 = Block.fromBlockData( + { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, + { common } + ) + const block3PoW = Block.fromBlockData( + { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, + { common } + ) + const block3PoS = Block.fromBlockData( + { header: { number: 3, parentHash: 
block2.hash(), difficulty: 0 } }, + { common, setHardfork: BigInt(200) } + ) + const block4InvalidPoS = Block.fromBlockData( + { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, + { common, setHardfork: BigInt(200) } + ) + const block4PoS = Block.fromBlockData( + { header: { number: 4, parentHash: block3PoS.hash(), difficulty: 0 } }, + { common, setHardfork: BigInt(200) } + ) + const block5 = Block.fromBlockData( + { header: { number: 5, parentHash: block4PoS.hash(), difficulty: 0 } }, + { common, setHardfork: BigInt(200) } + ) - await skeleton.initSync(block4InvalidPoS) - await skeleton.putBlocks([block3PoW, block2]) - st.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') - await skeleton.putBlocks([block1]) - await wait(200) - st.equal( - chain.blocks.height, - BigInt(2), - 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' - ) - try { - await skeleton.setHead(block5, false) - } catch (error: any) { - if (error !== errReorgDenied) { - t.fail(error) - } - } - await wait(200) - st.equal( - chain.blocks.height, - BigInt(2), - 'canonical height should not change when setHead is set with force=false' - ) - // Put correct chain - await skeleton.initSync(block4PoS) - try { - await skeleton.putBlocks([block3PoS]) - } catch (error: any) { - if (error !== errSyncMerged) { - t.fail(error) - } + const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) + await skeleton.open() + + await skeleton.initSync(block4InvalidPoS) + await skeleton.putBlocks([block3PoW, block2]) + assert.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') + await skeleton.putBlocks([block1]) + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(2), + 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' + ) + try { + await skeleton.setHead(block5, false) + } catch (error: any) { + if (error !== 
errReorgDenied) { + assert.fail(error) } - await wait(200) - st.equal( - chain.blocks.height, - BigInt(4), - 'canonical height should now be at head with correct chain' - ) - const latestHash = chain.headers.latest?.hash() - st.ok( - latestHash !== undefined && equalsBytes(latestHash, block4PoS.hash()), - 'canonical height should now be at head with correct chain' - ) - await skeleton.setHead(block5, true) - await wait(200) - st.equal(skeleton.bounds().head, BigInt(5), 'should update to new height') } - ) - - t.test( - 'should abort filling the canonical chain and backstep if the terminal block is invalid', - async (st) => { - const genesis = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 200, - clique: undefined, - ethash: {}, - }, - extraData: '0x00000000000000000', - difficulty: '0x1', + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(2), + 'canonical height should not change when setHead is set with force=false' + ) + // Put correct chain + await skeleton.initSync(block4PoS) + try { + await skeleton.putBlocks([block3PoS]) + } catch (error: any) { + if (error !== errSyncMerged) { + assert.fail(error) } - const common = Common.fromGethGenesis(genesis, { chain: 'post-merge' }) - common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) - const config = new Config({ - transports: [], - common, - accountCache: 10000, - storageCache: 1000, - }) - const chain = await Chain.create({ config }) - ;(chain.blockchain as any)._validateBlocks = false - ;(chain.blockchain as any)._validateConsensus = false - await chain.open() - const genesisBlock = await chain.getBlock(BigInt(0)) - - const block1 = Block.fromBlockData( - { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } - ) - const block2 = Block.fromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } - ) - const block3PoW = Block.fromBlockData( - { header: { number: 3, parentHash: 
block2.hash(), difficulty: 100 } }, - { common } - ) - const block4InvalidPoS = Block.fromBlockData( - { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, - { common, setHardfork: 200 } - ) + } + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(4), + 'canonical height should now be at head with correct chain' + ) + const latestHash = chain.headers.latest?.hash() + assert.ok( + latestHash !== undefined && equalsBytes(latestHash, block4PoS.hash()), + 'canonical height should now be at head with correct chain' + ) + await skeleton.setHead(block5, true) + await wait(200) + assert.equal(skeleton.bounds().head, BigInt(5), 'should update to new height') + }) - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - await skeleton.open() + it('should abort filling the canonical chain and backstep if the terminal block is invalid', async () => { + const genesis = { + ...genesisJSON, + config: { + ...genesisJSON.config, + terminalTotalDifficulty: 200, + clique: undefined, + ethash: {}, + }, + extraData: '0x00000000000000000', + difficulty: '0x1', + } + const common = Common.fromGethGenesis(genesis, { chain: 'post-merge' }) + common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) + const config = new Config({ + transports: [], + common, + accountCache: 10000, + storageCache: 1000, + }) + const chain = await Chain.create({ config }) + ;(chain.blockchain as any)._validateBlocks = false + ;(chain.blockchain as any)._validateConsensus = false + await chain.open() + const genesisBlock = await chain.getBlock(BigInt(0)) - await skeleton.initSync(block4InvalidPoS) - await skeleton.putBlocks([block3PoW, block2]) - st.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') - await skeleton.putBlocks([block1]) - await wait(200) - st.equal( - chain.blocks.height, - BigInt(2), - 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' - ) - st.equal( - 
(skeleton as any).status.progress.subchains[0].tail, - BigInt(4), - `Subchain should have been backstepped to 4` - ) + const block1 = Block.fromBlockData( + { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, + { common } + ) + const block2 = Block.fromBlockData( + { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, + { common } + ) + const block3PoW = Block.fromBlockData( + { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, + { common } + ) + const block4InvalidPoS = Block.fromBlockData( + { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, + { common, setHardfork: 200 } + ) + + const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) + await skeleton.open() + + await skeleton.initSync(block4InvalidPoS) + await skeleton.putBlocks([block3PoW, block2]) + assert.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') + await skeleton.putBlocks([block1]) + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(2), + 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' + ) + assert.equal( + (skeleton as any).status.progress.subchains[0].tail, + BigInt(4), + `Subchain should have been backstepped to 4` + ) + }) + + it('should abort filling the canonical chain if a PoS block comes too early without hitting ttd', async () => { + const genesis = { + ...genesisJSON, + config: { + ...genesisJSON.config, + terminalTotalDifficulty: 200, + skeletonFillCanonicalBackStep: 0, + }, + difficulty: '0x1', } - ) - - t.test( - 'should abort filling the canonical chain if a PoS block comes too early without hitting ttd', - async (st) => { - const genesis = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 200, - skeletonFillCanonicalBackStep: 0, - }, - difficulty: '0x1', - } - const common = Common.fromGethGenesis(genesis, { chain: 'post-merge' }) - common.setHardforkBy({ 
blockNumber: BigInt(0), td: BigInt(0) }) - const config = new Config({ - transports: [], - common, - logger: getLogger({ loglevel: 'debug' }), - accountCache: 10000, - storageCache: 1000, - }) + const common = Common.fromGethGenesis(genesis, { chain: 'post-merge' }) + common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) + const config = new Config({ + transports: [], + common, + logger: getLogger({ loglevel: 'debug' }), + accountCache: 10000, + storageCache: 1000, + }) - const chain = await Chain.create({ config }) - ;(chain.blockchain as any)._validateConsensus = false - // Only add td validations to the validateBlock - chain.blockchain.validateBlock = async (block: Block) => { - if (!(block.header.common.consensusType() === 'pos') && block.header.difficulty === 0n) { - throw Error( - `Invalid header difficulty=${ - block.header.difficulty - } for consensus=${block.header.common.consensusType()}` - ) - } + const chain = await Chain.create({ config }) + ;(chain.blockchain as any)._validateConsensus = false + // Only add td validations to the validateBlock + chain.blockchain.validateBlock = async (block: Block) => { + if (!(block.header.common.consensusType() === 'pos') && block.header.difficulty === 0n) { + throw Error( + `Invalid header difficulty=${ + block.header.difficulty + } for consensus=${block.header.common.consensusType()}` + ) } + } - const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation - ;(BlockHeader as any).prototype._consensusFormatValidation = td.func() - td.replace('@ethereumjs/block', { BlockHeader }) - await chain.open() - const genesisBlock = await chain.getBlock(BigInt(0)) + const originalValidate = BlockHeader.prototype['_consensusFormatValidation'] + BlockHeader.prototype['_consensusFormatValidation'] = td.func() + td.replace('@ethereumjs/block', { BlockHeader }) + await chain.open() + const genesisBlock = await chain.getBlock(BigInt(0)) - const block1 = Block.fromBlockData( - { header: { number: 1, 
parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } - ) - const block2 = Block.fromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } - ) - const block2PoS = Block.fromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 0 } }, - { common } - ) - const block3 = Block.fromBlockData( - { header: { number: 3, parentHash: block2.hash(), difficulty: 0 } }, - { common } - ) + const block1 = Block.fromBlockData( + { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, + { common } + ) + const block2 = Block.fromBlockData( + { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, + { common } + ) + const block2PoS = Block.fromBlockData( + { header: { number: 2, parentHash: block1.hash(), difficulty: 0 } }, + { common } + ) + const block3 = Block.fromBlockData( + { header: { number: 3, parentHash: block2.hash(), difficulty: 0 } }, + { common } + ) - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - await skeleton.open() + const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) + await skeleton.open() - await skeleton.initSync(block2PoS) - await skeleton.putBlocks([block1]) + await skeleton.initSync(block2PoS) + await skeleton.putBlocks([block1]) - await wait(200) - st.equal( - chain.blocks.height, - BigInt(1), - 'canonical height should stop at block 1 (valid PoW block), since block 2 is invalid (invalid PoS, not past ttd)' - ) - // Put correct chain - await skeleton.initSync(block3) - try { - await skeleton.putBlocks([block2]) - } catch (error: any) { - if (error !== errSyncMerged) { - t.fail(error) - } + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(1), + 'canonical height should stop at block 1 (valid PoW block), since block 2 is invalid (invalid PoS, not past ttd)' + ) + // Put correct chain + await skeleton.initSync(block3) + try { + await skeleton.putBlocks([block2]) + } catch (error: 
any) { + if (error !== errSyncMerged) { + assert.fail(error) } - await wait(200) - st.equal( - chain.blocks.height, - BigInt(3), - 'canonical height should now be at head with correct chain' - ) - const latestHash = chain.headers.latest?.hash() - st.ok( - latestHash !== undefined && equalsBytes(latestHash, block3.hash()), - 'canonical height should now be at head with correct chain' - ) - ;(BlockHeader as any).prototype._consensusFormatValidation = originalValidate - td.reset() } - ) + await wait(200) + assert.equal( + chain.blocks.height, + BigInt(3), + 'canonical height should now be at head with correct chain' + ) + const latestHash = chain.headers.latest?.hash() + assert.ok( + latestHash !== undefined && equalsBytes(latestHash, block3.hash()), + 'canonical height should now be at head with correct chain' + ) + + BlockHeader.prototype['_consensusFormatValidation'] = originalValidate + td.reset() + }) }) diff --git a/packages/client/test/sync/snapsync.spec.ts b/packages/client/test/sync/snapsync.spec.ts index f6ae6a9ddd..7ffe2d86bd 100644 --- a/packages/client/test/sync/snapsync.spec.ts +++ b/packages/client/test/sync/snapsync.spec.ts @@ -1,11 +1,11 @@ import { BlockHeader } from '@ethereumjs/block' -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' -tape('[SnapSynchronizer]', async (t) => { +describe('[SnapSynchronizer]', async () => { class PeerPool { open() {} close() {} @@ -38,16 +38,15 @@ tape('[SnapSynchronizer]', async (t) => { const { SnapSynchronizer } = await import('../../src/sync/snapsync') - t.test('should initialize correctly', async (t) => { + it('should initialize correctly', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) const sync = new SnapSynchronizer({ config, pool, 
chain }) - t.equals(sync.type, 'snap', 'snap type') - t.end() + assert.equal(sync.type, 'snap', 'snap type') }) - t.test('should open', async (t) => { + it('should open', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -56,12 +55,11 @@ tape('[SnapSynchronizer]', async (t) => { ;(sync as any).pool.peers = [] td.when((sync as any).pool.open()).thenResolve(null) await sync.open() - t.pass('opened') + assert.ok(true, 'opened') await sync.close() - t.end() }) - t.test('should find best', async (t) => { + it('should find best', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -96,9 +94,7 @@ tape('[SnapSynchronizer]', async (t) => { ] ;(sync as any).pool = { peers } ;(sync as any).forceSync = true - t.equal(await sync.best(), peers[1], 'found best') + assert.equal(await sync.best(), peers[1], 'found best') await sync.start() - - t.end() }) }) diff --git a/packages/client/test/sync/sync.spec.ts b/packages/client/test/sync/sync.spec.ts index 7a0352fba6..6f295bb29b 100644 --- a/packages/client/test/sync/sync.spec.ts +++ b/packages/client/test/sync/sync.spec.ts @@ -1,5 +1,5 @@ -import * as tape from 'tape' import * as td from 'testdouble' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain' import { Config } from '../../src/config' @@ -18,7 +18,7 @@ class SynchronizerTest extends Synchronizer { } } -tape('[Synchronizer]', async (t) => { +describe('[Synchronizer]', async () => { class PeerPool { open() {} close() {} @@ -26,7 +26,7 @@ tape('[Synchronizer]', async (t) => { PeerPool.prototype.open = td.func() PeerPool.prototype.close = td.func() - t.test('should sync', async (t) => { + it('should sync', async () => { const config = new Config({ transports: [], accountCache: 
10000, storageCache: 1000 }) config.syncTargetHeight = BigInt(1) const pool = new PeerPool() as any @@ -35,12 +35,11 @@ tape('[Synchronizer]', async (t) => { ;(sync as any).sync = td.func() td.when((sync as any).sync()).thenResolve(true) config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - t.ok('synchronized', 'synchronized') + assert.ok('synchronized', 'synchronized') await sync.stop() - t.notOk((sync as any).running, 'stopped') + assert.notOk((sync as any).running, 'stopped') await sync.close() await chain.close() - t.end() }) void sync.start() ;(sync as any).chain._headers = { @@ -53,12 +52,10 @@ tape('[Synchronizer]', async (t) => { // test getting out of sync ;(config as any).syncedStateRemovalPeriod = 0 config.updateSynchronizedState() - t.equal(config.synchronized, false, 'should fall out of sync') - await new Promise(() => {}) // resolves once t.end() is called + assert.equal(config.synchronized, false, 'should fall out of sync') }) - t.test('should reset td', (t) => { + it('should reset td', () => { td.reset() - t.end() }) }) diff --git a/packages/client/test/sync/txpool.spec.ts b/packages/client/test/sync/txpool.spec.ts index e159661468..6fdfa7397c 100644 --- a/packages/client/test/sync/txpool.spec.ts +++ b/packages/client/test/sync/txpool.spec.ts @@ -10,7 +10,7 @@ import { hexToBytes, privateToAddress, } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { Config } from '../../src/config' import { getLogger } from '../../src/logging' @@ -94,7 +94,7 @@ const handleTxs = async ( } } -tape('[TxPool]', async (t) => { +describe('[TxPool]', async () => { const ogStateManagerSetStateRoot = DefaultStateManager.prototype.setStateRoot DefaultStateManager.prototype.setStateRoot = (): any => {} @@ -130,37 +130,35 @@ tape('[TxPool]', async (t) => { const txB01 = createTx(B, A) // B -> A, nonce: 0, value: 1 const txB02 = createTx(B, A, 1, 5) // B -> A, nonce: 1, value: 5 - t.test('should initialize 
correctly', (t) => { + it('should initialize correctly', () => { const { pool } = setup() - t.equal(pool.pool.size, 0, 'pool empty') - t.notOk((pool as any).opened, 'pool not opened yet') + assert.equal(pool.pool.size, 0, 'pool empty') + assert.notOk((pool as any).opened, 'pool not opened yet') pool.open() - t.ok((pool as any).opened, 'pool opened') + assert.ok((pool as any).opened, 'pool opened') pool.start() - t.ok((pool as any).running, 'pool running') + assert.ok((pool as any).running, 'pool running') pool.stop() - t.notOk((pool as any).running, 'pool not running anymore') + assert.notOk((pool as any).running, 'pool not running anymore') pool.close() - t.notOk((pool as any).opened, 'pool not opened anymore') - t.end() + assert.notOk((pool as any).opened, 'pool not opened anymore') }) - t.test('should open/close', async (t) => { - t.plan(3) + it('should open/close', async () => { const { pool } = setup() pool.open() pool.start() - t.ok((pool as any).opened, 'pool opened') - t.equals(pool.open(), false, 'already opened') + assert.ok((pool as any).opened, 'pool opened') + assert.equal(pool.open(), false, 'already opened') pool.stop() pool.close() - t.notOk((pool as any).opened, 'closed') + assert.notOk((pool as any).opened, 'closed') }) - t.test('announcedTxHashes() -> add single tx / knownByPeer / getByHash()', async (t) => { + it('announcedTxHashes() -> add single tx / knownByPeer / getByHash()', async () => { // Safeguard that send() method from peer2 gets called - t.plan(12) + const { pool } = setup() pool.open() @@ -173,10 +171,10 @@ tape('[TxPool]', async (t) => { return [null, [txA01]] }, send: () => { - t.fail('should not send to announcing peer') + assert.fail('should not send to announcing peer') }, request: () => { - t.fail('should not send to announcing peer') + assert.fail('should not send to announcing peer') }, }, } @@ -187,11 +185,11 @@ tape('[TxPool]', async (t) => { versions: [66], send: () => { sentToPeer2++ - t.equal(sentToPeer2, 1, 'should 
send once to non-announcing peer') + assert.equal(sentToPeer2, 1, 'should send once to non-announcing peer') }, request: () => { sentToPeer2++ - t.equal(sentToPeer2, 1, 'should send once to non-announcing peer') + assert.equal(sentToPeer2, 1, 'should send once to non-announcing peer') }, }, } @@ -200,21 +198,25 @@ tape('[TxPool]', async (t) => { peerPool.add(peer2) await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool) - t.equal(pool.pool.size, 1, 'pool size 1') - t.equal((pool as any).pending.length, 0, 'cleared pending txs') - t.equal((pool as any).handled.size, 1, 'added to handled txs') + assert.equal(pool.pool.size, 1, 'pool size 1') + assert.equal((pool as any).pending.length, 0, 'cleared pending txs') + assert.equal((pool as any).handled.size, 1, 'added to handled txs') - t.equal((pool as any).knownByPeer.size, 2, 'known tx hashes size 2 (entries for both peers)') - t.equal((pool as any).knownByPeer.get(peer.id).length, 1, 'one tx added for peer 1') - t.equal( + assert.equal( + (pool as any).knownByPeer.size, + 2, + 'known tx hashes size 2 (entries for both peers)' + ) + assert.equal((pool as any).knownByPeer.get(peer.id).length, 1, 'one tx added for peer 1') + assert.equal( (pool as any).knownByPeer.get(peer.id)[0].hash, bytesToUnprefixedHex(txA01.hash()), 'new known tx hashes entry for announcing peer' ) const txs = pool.getByHash([txA01.hash()]) - t.equal(txs.length, 1, 'should get correct number of txs by hash') - t.equal( + assert.equal(txs.length, 1, 'should get correct number of txs by hash') + assert.equal( bytesToHex(txs[0].serialize()), bytesToHex(txA01.serialize()), 'should get correct tx by hash' @@ -222,19 +224,23 @@ tape('[TxPool]', async (t) => { pool.pool.clear() await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool) - t.equal(pool.pool.size, 0, 'should not add a once handled tx') - t.equal( + assert.equal(pool.pool.size, 0, 'should not add a once handled tx') + assert.equal( (pool as 
any).knownByPeer.get(peer.id).length, 1, 'should add tx only once to known tx hashes' ) - t.equal((pool as any).knownByPeer.size, 2, 'known tx hashes size 2 (entries for both peers)') + assert.equal( + (pool as any).knownByPeer.size, + 2, + 'known tx hashes size 2 (entries for both peers)' + ) pool.stop() pool.close() }) - t.test('announcedTxHashes() -> TX_RETRIEVAL_LIMIT', async (t) => { + it('announcedTxHashes() -> TX_RETRIEVAL_LIMIT', async () => { const { pool } = setup() const TX_RETRIEVAL_LIMIT: number = (pool as any).TX_RETRIEVAL_LIMIT @@ -244,7 +250,11 @@ tape('[TxPool]', async (t) => { eth: { versions: [66], getPooledTransactions: (res: any) => { - t.equal(res['hashes'].length, TX_RETRIEVAL_LIMIT, 'should limit to TX_RETRIEVAL_LIMIT') + assert.equal( + res['hashes'].length, + TX_RETRIEVAL_LIMIT, + 'should limit to TX_RETRIEVAL_LIMIT' + ) return [null, []] }, }, @@ -262,7 +272,7 @@ tape('[TxPool]', async (t) => { pool.close() }) - t.test('announcedTxHashes() -> add two txs (different sender)', async (t) => { + it('announcedTxHashes() -> add two txs (different sender)', async () => { const { pool } = setup() pool.open() @@ -278,12 +288,12 @@ tape('[TxPool]', async (t) => { const peerPool = new PeerPool({ config }) await pool.handleAnnouncedTxHashes([txA01.hash(), txB01.hash()], peer, peerPool) - t.equal(pool.pool.size, 2, 'pool size 2') + assert.equal(pool.pool.size, 2, 'pool size 2') pool.stop() pool.close() }) - t.test('announcedTxHashes() -> add two txs (same sender and nonce)', async (t) => { + it('announcedTxHashes() -> add two txs (same sender and nonce)', async () => { const { pool } = setup() pool.open() @@ -299,16 +309,16 @@ tape('[TxPool]', async (t) => { const peerPool = new PeerPool({ config }) await pool.handleAnnouncedTxHashes([txA01.hash(), txA02.hash()], peer, peerPool) - t.equal(pool.pool.size, 1, 'pool size 1') + assert.equal(pool.pool.size, 1, 'pool size 1') const address = bytesToUnprefixedHex(A.address) const poolContent = 
pool.pool.get(address)! - t.equal(poolContent.length, 1, 'only one tx') - t.deepEqual(poolContent[0].tx.hash(), txA02.hash(), 'only later-added tx') + assert.equal(poolContent.length, 1, 'only one tx') + assert.deepEqual(poolContent[0].tx.hash(), txA02.hash(), 'only later-added tx') pool.stop() pool.close() }) - t.test('announcedTxHashes() -> reject underpriced txn (same sender and nonce)', async (t) => { + it('announcedTxHashes() -> reject underpriced txn (same sender and nonce)', async () => { const { pool } = setup() pool.open() @@ -341,32 +351,32 @@ tape('[TxPool]', async (t) => { try { await pool.add(txA02_Underpriced) - t.fail('should fail adding underpriced txn to txpool') + assert.fail('should fail adding underpriced txn to txpool') } catch (e: any) { - t.ok( + assert.ok( e.message.includes('replacement gas too low'), 'successfully failed adding underpriced txn' ) const poolObject = pool['handled'].get(bytesToUnprefixedHex(txA02_Underpriced.hash())) - t.equal(poolObject?.error, e, 'should have an errored poolObject') + assert.equal(poolObject?.error, e, 'should have an errored poolObject') const poolTxs = pool.getByHash([txA02_Underpriced.hash()]) - t.equal(poolTxs.length, 0, `should not be added in pool`) + assert.equal(poolTxs.length, 0, `should not be added in pool`) } - t.equal(pool.pool.size, 1, 'pool size 1') - t.equal(sentToPeer2, 1, 'broadcast attempt to the peer') - t.equal((pool as any).knownByPeer.get(peer2.id).length, 1, 'known send objects') - t.equal( + assert.equal(pool.pool.size, 1, 'pool size 1') + assert.equal(sentToPeer2, 1, 'broadcast attempt to the peer') + assert.equal((pool as any).knownByPeer.get(peer2.id).length, 1, 'known send objects') + assert.equal( (pool as any).knownByPeer.get(peer2.id)[0]?.error?.message, 'NewPooledTransactionHashes', 'should have errored sendObject for NewPooledTransactionHashes broadcast' ) const address = bytesToUnprefixedHex(A.address) const poolContent = pool.pool.get(address)! 
- t.equal(poolContent.length, 1, 'only one tx') - t.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'only later-added tx') + assert.equal(poolContent.length, 1, 'only one tx') + assert.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'only later-added tx') // Another attempt to add tx which should not be broadcased to peer2 await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool) - t.equal(sentToPeer2, 1, 'no new broadcast attempt to the peer') + assert.equal(sentToPeer2, 1, 'no new broadcast attempt to the peer') // Just to enhance logging coverage, assign peerPool for stats collection pool['service'].pool = peerPool pool._logPoolStats() @@ -374,37 +384,33 @@ tape('[TxPool]', async (t) => { pool.close() }) - t.test( - 'announcedTxHashes() -> reject underpriced txn (same sender and nonce) in handleAnnouncedTxHashes', - async (t) => { - const { pool } = setup() - - pool.open() - pool.start() - const txs = [txA01, txA02_Underpriced] - const peer: any = { - eth: { - versions: [66], - getPooledTransactions: () => { - return [null, txs] - }, + it('announcedTxHashes() -> reject underpriced txn (same sender and nonce) in handleAnnouncedTxHashes', async () => { + const { pool } = setup() + + pool.open() + pool.start() + const txs = [txA01, txA02_Underpriced] + const peer: any = { + eth: { + getPooledTransactions: () => { + return [null, txs] }, - } - const peerPool = new PeerPool({ config }) + }, + } + const peerPool = new PeerPool({ config }) - await pool.handleAnnouncedTxHashes([txA01.hash(), txA02_Underpriced.hash()], peer, peerPool) + await pool.handleAnnouncedTxHashes([txA01.hash(), txA02_Underpriced.hash()], peer, peerPool) - t.equal(pool.pool.size, 1, 'pool size 1') - const address = bytesToUnprefixedHex(A.address) - const poolContent = pool.pool.get(address)! 
- t.equal(poolContent.length, 1, 'only one tx') - t.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'only later-added tx') - pool.stop() - pool.close() - } - ) + assert.equal(pool.pool.size, 1, 'pool size 1') + const address = bytesToUnprefixedHex(A.address) + const poolContent = pool.pool.get(address)! + assert.equal(poolContent.length, 1, 'only one tx') + assert.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'only later-added tx') + pool.stop() + pool.close() + }) - t.test('announcedTxHashes() -> reject if pool is full', async (t) => { + it('announcedTxHashes() -> reject if pool is full', async () => { // Setup 5001 txs const txs = [] for (let account = 0; account < 51; account++) { @@ -427,10 +433,10 @@ tape('[TxPool]', async (t) => { break } } - t.notOk(await handleTxs(txs, 'pool is full'), 'successfully rejected too many txs') + assert.notOk(await handleTxs(txs, 'pool is full'), 'successfully rejected too many txs') }) - t.test('announcedTxHashes() -> reject if account tries to send more than 100 txs', async (t) => { + it('announcedTxHashes() -> reject if account tries to send more than 100 txs', async () => { // Setup 101 txs const txs = [] @@ -439,13 +445,13 @@ tape('[TxPool]', async (t) => { txs.push(txn) } - t.notOk( + assert.notOk( await handleTxs(txs, 'already have max amount of txs for this account'), 'successfully rejected too many txs from same account' ) }) - t.test('announcedTxHashes() -> reject unsigned txs', async (t) => { + it('announcedTxHashes() -> reject unsigned txs', async () => { const txs = [] txs.push( @@ -455,13 +461,13 @@ tape('[TxPool]', async (t) => { }) ) - t.notOk( + assert.notOk( await handleTxs(txs, 'Cannot call hash method if transaction is not signed'), 'successfully rejected unsigned tx' ) }) - t.test('announcedTxHashes() -> reject txs with invalid nonce', async (t) => { + it('announcedTxHashes() -> reject txs with invalid nonce', async () => { const txs = [] txs.push( @@ -472,7 +478,7 @@ tape('[TxPool]', async (t) => { 
}).sign(A.privateKey) ) - t.notOk( + assert.notOk( await handleTxs(txs, 'tx nonce too low', { getAccount: () => new Account(BigInt(1), BigInt('50000000000000000000')), } as any), @@ -480,7 +486,7 @@ tape('[TxPool]', async (t) => { ) }) - t.test('announcedTxHashes() -> reject txs with too much data', async (t) => { + it('announcedTxHashes() -> reject txs with too much data', async () => { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Paris }) const txs = [] @@ -496,7 +502,7 @@ tape('[TxPool]', async (t) => { ).sign(A.privateKey) ) - t.notOk( + assert.notOk( await handleTxs(txs, 'exceeds the max data size', { getAccount: () => new Account(BigInt(0), BigInt('50000000000000000000000')), } as any), @@ -504,7 +510,7 @@ tape('[TxPool]', async (t) => { ) }) - t.test('announcedTxHashes() -> account cannot pay the fees', async (t) => { + it('announcedTxHashes() -> account cannot pay the fees', async () => { const txs = [] txs.push( @@ -516,7 +522,7 @@ tape('[TxPool]', async (t) => { }).sign(A.privateKey) ) - t.notOk( + assert.notOk( await handleTxs(txs, 'insufficient balance', { getAccount: () => new Account(BigInt(0), BigInt('0')), } as any), @@ -524,7 +530,7 @@ tape('[TxPool]', async (t) => { ) }) - t.test('announcedTxHashes() -> reject txs which cannot pay base fee', async (t) => { + it('announcedTxHashes() -> reject txs which cannot pay base fee', async () => { const txs = [] txs.push( @@ -541,40 +547,37 @@ tape('[TxPool]', async (t) => { baseFeePerGas: BigInt(3000000000), }) - t.notOk( + assert.notOk( await handleTxs(txs, 'not within 50% range of current basefee', undefined, pool), 'successfully rejected tx with too low gas price' ) }) - t.test( - 'announcedTxHashes() -> reject txs which have gas limit higher than block gas limit', - async (t) => { - const txs = [] + it('announcedTxHashes() -> reject txs which have gas limit higher than block gas limit', async () => { + const txs = [] - txs.push( - FeeMarketEIP1559Transaction.fromTxData({ - 
maxFeePerGas: 1000000000, - maxPriorityFeePerGas: 1000000000, - nonce: 0, - gasLimit: 21000, - }).sign(A.privateKey) - ) + txs.push( + FeeMarketEIP1559Transaction.fromTxData({ + maxFeePerGas: 1000000000, + maxPriorityFeePerGas: 1000000000, + nonce: 0, + gasLimit: 21000, + }).sign(A.privateKey) + ) - const { pool } = setup() + const { pool } = setup() - ;(pool).service.chain.getCanonicalHeadHeader = () => ({ - gasLimit: BigInt(5000), - }) + ;(pool).service.chain.getCanonicalHeadHeader = () => ({ + gasLimit: BigInt(5000), + }) - t.notOk( - await handleTxs(txs, 'exceeds last block gas limit', undefined, pool), - 'successfully rejected tx which has gas limit higher than block gas limit' - ) - } - ) + assert.notOk( + await handleTxs(txs, 'exceeds last block gas limit', undefined, pool), + 'successfully rejected tx which has gas limit higher than block gas limit' + ) + }) - t.test('announcedTxHashes() -> reject txs which are already in pool', async (t) => { + it('announcedTxHashes() -> reject txs which are already in pool', async () => { const txs = [] txs.push( @@ -588,13 +591,13 @@ tape('[TxPool]', async (t) => { const { pool } = setup() - t.notOk( + assert.notOk( await handleTxs(txs, 'this transaction is already in the TxPool', undefined, pool), 'successfully rejected tx which is already in pool' ) }) - t.test('announcedTxHashes() -> reject txs with too low gas price', async (t) => { + it('announcedTxHashes() -> reject txs with too low gas price', async () => { const txs = [] txs.push( @@ -605,55 +608,49 @@ tape('[TxPool]', async (t) => { }).sign(A.privateKey) ) - t.notOk( + assert.notOk( await handleTxs(txs, 'does not pay the minimum gas price of'), 'successfully rejected tx with too low gas price' ) }) - t.test( - 'announcedTxHashes() -> reject txs with too low gas price (AccessListTransaction)', - async (t) => { - const txs = [] + it('announcedTxHashes() -> reject txs with too low gas price (AccessListTransaction)', async () => { + const txs = [] - txs.push( - 
AccessListEIP2930Transaction.fromTxData({ - gasPrice: 10000000, - nonce: 0, - }).sign(A.privateKey) - ) + txs.push( + AccessListEIP2930Transaction.fromTxData({ + gasPrice: 10000000, + nonce: 0, + }).sign(A.privateKey) + ) - t.notOk( - await handleTxs(txs, 'does not pay the minimum gas price of'), - 'successfully rejected tx with too low gas price' - ) - } - ) + assert.notOk( + await handleTxs(txs, 'does not pay the minimum gas price of'), + 'successfully rejected tx with too low gas price' + ) + }) - t.test( - 'announcedTxHashes() -> reject txs with too low gas price (invalid tx type)', - async (t) => { - const txs = [] + it('announcedTxHashes() -> reject txs with too low gas price (invalid tx type)', async () => { + const txs = [] - const tx = AccessListEIP2930Transaction.fromTxData( - { - gasPrice: 1000000000 - 1, - nonce: 0, - }, - { - freeze: false, - } - ).sign(A.privateKey) + const tx = AccessListEIP2930Transaction.fromTxData( + { + gasPrice: 1000000000 - 1, + nonce: 0, + }, + { + freeze: false, + } + ).sign(A.privateKey) - Object.defineProperty(tx, 'type', { get: () => 5 }) + Object.defineProperty(tx, 'type', { get: () => 5 }) - txs.push(tx) + txs.push(tx) - t.notOk(await handleTxs(txs, ''), 'successfully rejected tx with invalid tx type') - } - ) + assert.notOk(await handleTxs(txs, ''), 'successfully rejected tx with invalid tx type') + }) - t.test('announcedTxs()', async (t) => { + it('announcedTxs()', async () => { const { pool } = setup() pool.open() @@ -666,16 +663,16 @@ tape('[TxPool]', async (t) => { const peerPool = new PeerPool({ config }) await pool.handleAnnouncedTxs([txA01], peer, peerPool) - t.equal(pool.pool.size, 1, 'pool size 1') + assert.equal(pool.pool.size, 1, 'pool size 1') const address = bytesToUnprefixedHex(A.address) const poolContent = pool.pool.get(address)! 
- t.equal(poolContent.length, 1, 'one tx') - t.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'correct tx') + assert.equal(poolContent.length, 1, 'one tx') + assert.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'correct tx') pool.stop() pool.close() }) - t.test('newBlocks() -> should remove included txs', async (t) => { + it('newBlocks() -> should remove included txs', async () => { const { pool } = setup() pool.open() @@ -690,17 +687,17 @@ tape('[TxPool]', async (t) => { const peerPool = new PeerPool({ config }) await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool) - t.equal(pool.pool.size, 1, 'pool size 1') + assert.equal(pool.pool.size, 1, 'pool size 1') // Craft block with tx not in pool let block = Block.fromBlockData({ transactions: [txA02] }, { common }) pool.removeNewBlockTxs([block]) - t.equal(pool.pool.size, 1, 'pool size 1') + assert.equal(pool.pool.size, 1, 'pool size 1') // Craft block with tx in pool block = Block.fromBlockData({ transactions: [txA01] }, { common }) pool.removeNewBlockTxs([block]) - t.equal(pool.pool.size, 0, 'pool should be empty') + assert.equal(pool.pool.size, 0, 'pool should be empty') peer = { eth: { @@ -710,34 +707,34 @@ tape('[TxPool]', async (t) => { }, } await pool.handleAnnouncedTxHashes([txB01.hash(), txB02.hash()], peer, peerPool) - t.equal(pool.pool.size, 1, 'pool size 1') + assert.equal(pool.pool.size, 1, 'pool size 1') const address = bytesToUnprefixedHex(B.address) let poolContent = pool.pool.get(address)! - t.equal(poolContent.length, 2, 'two txs') + assert.equal(poolContent.length, 2, 'two txs') // Craft block with tx not in pool block = Block.fromBlockData({ transactions: [txA02] }, { common }) pool.removeNewBlockTxs([block]) - t.equal(pool.pool.size, 1, 'pool size 1') + assert.equal(pool.pool.size, 1, 'pool size 1') poolContent = pool.pool.get(address)! 
- t.equal(poolContent.length, 2, 'two txs') + assert.equal(poolContent.length, 2, 'two txs') // Craft block with tx in pool block = Block.fromBlockData({ transactions: [txB01] }, { common }) pool.removeNewBlockTxs([block]) poolContent = pool.pool.get(address)! - t.equal(poolContent.length, 1, 'only one tx') + assert.equal(poolContent.length, 1, 'only one tx') // Craft block with tx in pool block = Block.fromBlockData({ transactions: [txB02] }, { common }) pool.removeNewBlockTxs([block]) - t.equal(pool.pool.size, 0, 'pool size 0') + assert.equal(pool.pool.size, 0, 'pool size 0') pool.stop() pool.close() }) - t.test('cleanup()', async (t) => { + it('cleanup()', async () => { const { pool } = setup() pool.open() @@ -754,23 +751,23 @@ tape('[TxPool]', async (t) => { peerPool.add(peer) await pool.handleAnnouncedTxHashes([txA01.hash(), txB01.hash()], peer, peerPool) - t.equal(pool.pool.size, 2, 'pool size 2') - t.equal((pool as any).handled.size, 2, 'handled size 2') - t.equal((pool as any).knownByPeer.size, 1, 'known by peer size 1') - t.equal((pool as any).knownByPeer.get(peer.id).length, 2, '2 known txs') + assert.equal(pool.pool.size, 2, 'pool size 2') + assert.equal((pool as any).handled.size, 2, 'handled size 2') + assert.equal((pool as any).knownByPeer.size, 1, 'known by peer size 1') + assert.equal((pool as any).knownByPeer.get(peer.id).length, 2, '2 known txs') pool.cleanup() - t.equal( + assert.equal( pool.pool.size, 2, 'should not remove txs from pool (POOLED_STORAGE_TIME_LIMIT within range)' ) - t.equal( + assert.equal( (pool as any).knownByPeer.size, 1, 'should not remove txs from known by peer map (POOLED_STORAGE_TIME_LIMIT within range)' ) - t.equal( + assert.equal( (pool as any).handled.size, 2, 'should not remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT within range)' @@ -792,17 +789,17 @@ tape('[TxPool]', async (t) => { ;(pool as any).handled.set(hash, handledObj) pool.cleanup() - t.equal( + assert.equal( pool.pool.size, 1, 'should remove txs from 
pool (POOLED_STORAGE_TIME_LIMIT before range)' ) - t.equal( + assert.equal( (pool as any).knownByPeer.get(peer.id).length, 1, 'should remove one tx from known by peer map (POOLED_STORAGE_TIME_LIMIT before range)' ) - t.equal( + assert.equal( (pool as any).handled.size, 1, 'should remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT before range)' diff --git a/packages/client/test/util/parse.spec.ts b/packages/client/test/util/parse.spec.ts index f3a5a6e153..f3f8ef633e 100644 --- a/packages/client/test/util/parse.spec.ts +++ b/packages/client/test/util/parse.spec.ts @@ -1,57 +1,55 @@ import { multiaddr } from 'multiaddr' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { parseMultiaddrs, parseTransports } from '../../src/util' -tape('[Util/Parse]', (t) => { - t.test('should parse multiaddrs', (t) => { - t.plan(8) - t.deepEquals(parseMultiaddrs(''), [], 'handle empty') - t.deepEquals( +describe('[Util/Parse]', () => { + it('should parse multiaddrs', () => { + assert.deepEqual(parseMultiaddrs(''), [], 'handle empty') + assert.deepEqual( parseMultiaddrs('10.0.0.1:1234'), [multiaddr('/ip4/10.0.0.1/tcp/1234')], 'parse ip:port' ) - t.deepEquals( + assert.deepEqual( parseMultiaddrs('enode://abc@10.0.0.1:1234'), [multiaddr('/ip4/10.0.0.1/tcp/1234')], 'parse url' ) - t.deepEquals( + assert.deepEqual( parseMultiaddrs('/ip4/1.1.1.1/tcp/50507/ws'), [multiaddr('/ip4/1.1.1.1/tcp/50507/ws')], 'parse multiaddr' ) - t.deepEquals( + assert.deepEqual( parseMultiaddrs( '/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa' ), [multiaddr('/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa')], 'parse multiaddr with peer id' ) - t.deepEquals( + assert.deepEqual( parseMultiaddrs( '10.0.0.1:1234,enode://343149e4feefa15d882d9fe4ac7d88f885bd05ebb735e547f12e12080a9fa07c8014ca6fd7f373123488102fe5e34111f8509cf0b7de3f5b44339c9f25e87cb8@127.0.0.1:2345' ), [multiaddr('/ip4/10.0.0.1/tcp/1234'), 
multiaddr('/ip4/127.0.0.1/tcp/2345')], 'parse multiple' ) - t.throws(() => parseMultiaddrs(10 as any), /not a function/, 'throws error') - t.deepEquals( + assert.throws(() => parseMultiaddrs(10 as any), /not a function/, 'throws error') + assert.deepEqual( parseMultiaddrs('[2607:f8b0:4003:c00::6a]:5678'), [multiaddr('/ip6/2607:f8b0:4003:c00::6a/tcp/5678')], 'parse ipv6 multiaddr' ) }) - t.test('should parse transports', (t) => { - t.plan(2) - t.deepEquals( + it('should parse transports', () => { + assert.deepEqual( parseTransports(['t1']), [{ name: 't1', options: {} }], 'parsed transport without options' ) - t.deepEquals( + assert.deepEqual( parseTransports(['t2:k1=v1,k:k=v2,k3="v3",k4,k5=']), [ { diff --git a/packages/client/test/util/rpc.spec.ts b/packages/client/test/util/rpc.spec.ts index 944329963c..a46b4ec00f 100644 --- a/packages/client/test/util/rpc.spec.ts +++ b/packages/client/test/util/rpc.spec.ts @@ -1,5 +1,5 @@ import { bytesToHex } from '@ethereumjs/util' -import * as tape from 'tape' +import { assert, describe, it } from 'vitest' import { EthereumClient } from '../../src/client' import { Config } from '../../src/config' @@ -14,8 +14,8 @@ import { const request = require('supertest') -tape('[Util/RPC]', (t) => { - t.test('should return enabled RPC servers', async (st) => { +describe('[Util/RPC]', () => { + it('should return enabled RPC servers', async () => { const config = new Config({ transports: [], accountCache: 10000, storageCache: 1000 }) const client = await EthereumClient.create({ config }) const manager = new RPCManager(client, config) @@ -46,17 +46,16 @@ tape('[Util/RPC]', (t) => { server.emit('response', req, []) // empty server.emit('response', [req], respBulk) // mismatch length - st.ok( + assert.ok( httpServer !== undefined && wsServer !== undefined, 'should return http and ws servers' ) } } - st.end() }) }) -tape('[Util/RPC/Engine eth methods]', async (t) => { +describe('[Util/RPC/Engine eth methods]', async () => { const config = new 
Config({ transports: [], accountCache: 10000, @@ -82,7 +81,7 @@ tape('[Util/RPC/Engine eth methods]', async (t) => { 'eth_syncing', ] for (const method of methods) { - t.test(`should have method ${method}`, (st) => { + it(`should have method ${method}`, () => { const req = { jsonrpc: '2.0', method, @@ -99,7 +98,7 @@ tape('[Util/RPC/Engine eth methods]', async (t) => { } }) .end((err: any) => { - st.end(err) + assert.notOk(err) }) }) } diff --git a/packages/client/vitest.config.unit.ts b/packages/client/vitest.config.unit.ts new file mode 100644 index 0000000000..7803737c09 --- /dev/null +++ b/packages/client/vitest.config.unit.ts @@ -0,0 +1,9 @@ +import { defineConfig } from 'vitest/config' + +export default defineConfig({ + test: { + silent: true, + exclude: ['test/integration', 'test/sim'], + testTimeout: 60000, + }, +})