mirror of
https://github.com/0xbow-io/privacy-pools-core.git
synced 2026-01-08 00:53:54 -05:00
Release v1.1.1 (#107)
This commit is contained in:
.github/workflows/sdk-npm-release.yml (vendored)
@@ -1,18 +1,15 @@
-name: SDK / Release
+name: SDK NPM Release
 
-on: workflow_dispatch
-
-concurrency:
-  group: ${{github.workflow}}-${{github.ref}}
-  cancel-in-progress: true
-
-defaults:
-  run:
-    working-directory: packages/sdk
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - "packages/sdk/**"
+  workflow_dispatch:
 
 jobs:
-  canary-release:
-    name: SDK Release
+  release:
     runs-on: ubuntu-latest
 
     steps:
@@ -31,7 +28,55 @@ jobs:
       - name: Install dependencies
        run: yarn --frozen-lockfile --network-concurrency 1
 
+      - name: Get package version and validate
+        id: version_check
+        run: |
+          # Get version from package.json
+          PACKAGE_VERSION=$(node -p "require('./package.json').version")
+          echo "PACKAGE_VERSION=$PACKAGE_VERSION" >> $GITHUB_ENV
+          echo "package_version=$PACKAGE_VERSION" >> $GITHUB_OUTPUT
+
+          # Get published version from npm (handle case where package doesn't exist yet)
+          set +e
+          NPM_VERSION=$(npm view @0xbow/privacy-pools-core-sdk version 2>/dev/null)
+          NPM_EXIT_CODE=$?
+          set -e
+
+          if [ $NPM_EXIT_CODE -eq 0 ]; then
+            echo "NPM_VERSION=$NPM_VERSION" >> $GITHUB_ENV
+            echo "npm_version=$NPM_VERSION" >> $GITHUB_OUTPUT
+
+            # Compare versions
+            if [ "$PACKAGE_VERSION" = "$NPM_VERSION" ]; then
+              echo "📋 Package version ($PACKAGE_VERSION) matches published version ($NPM_VERSION)"
+              echo "This suggests no release is needed - skipping publish step"
+              echo "SHOULD_PUBLISH=false" >> $GITHUB_ENV
+            elif npx semver $PACKAGE_VERSION -r ">$NPM_VERSION" >/dev/null 2>&1; then
+              echo "✅ Version validation passed: $PACKAGE_VERSION > $NPM_VERSION"
+              echo "SHOULD_PUBLISH=true" >> $GITHUB_ENV
+            else
+              echo "❌ Error: Package version ($PACKAGE_VERSION) is not greater than published version ($NPM_VERSION)"
+              echo "If you intended to release, please bump the version in packages/sdk/package.json"
+              echo "If this is just a code change without release, this is expected behavior"
+              echo "SHOULD_PUBLISH=false" >> $GITHUB_ENV
+              exit 1
+            fi
+          else
+            echo "📦 First time publishing package version: $PACKAGE_VERSION"
+            echo "NPM_VERSION=none" >> $GITHUB_ENV
+            echo "npm_version=none" >> $GITHUB_OUTPUT
+            echo "SHOULD_PUBLISH=true" >> $GITHUB_ENV
+          fi
+
+          # Validate semantic versioning format
+          if ! npx semver $PACKAGE_VERSION >/dev/null 2>&1; then
+            echo "❌ Error: Package version ($PACKAGE_VERSION) is not a valid semantic version"
+            exit 1
+          fi
+        working-directory: packages/sdk
+
       - name: Build SDK
+        if: env.SHOULD_PUBLISH == 'true'
         run: |
           yarn clean
           yarn build
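
The validation step above only lets the job publish when packages/sdk/package.json is strictly ahead of what npm already serves. A minimal TypeScript sketch of that gating decision, using the semver package's valid/gt helpers in place of the npx semver range check; fetchPublishedVersion is an illustrative stand-in for the npm view call and is not part of this workflow:

import semver from "semver";

// Illustrative stand-in for `npm view @0xbow/privacy-pools-core-sdk version`:
// resolves to the published version, or null if the package has never been published.
async function fetchPublishedVersion(packageName: string): Promise<string | null> {
  void packageName;
  return null;
}

interface PublishDecision {
  shouldPublish: boolean;
  reason: string;
}

async function decidePublish(packageVersion: string, packageName: string): Promise<PublishDecision> {
  // Mirrors the workflow's format check: refuse anything that is not valid semver.
  if (!semver.valid(packageVersion)) {
    throw new Error(`Package version (${packageVersion}) is not a valid semantic version`);
  }

  const published = await fetchPublishedVersion(packageName);

  if (published === null) {
    // First publish: nothing on npm to compare against.
    return { shouldPublish: true, reason: `first release of ${packageVersion}` };
  }
  if (packageVersion === published) {
    // Same version: a code change without a release, so skip publishing.
    return { shouldPublish: false, reason: "package.json matches the published version" };
  }
  if (semver.gt(packageVersion, published)) {
    return { shouldPublish: true, reason: `version bump ${published} -> ${packageVersion}` };
  }
  // Lower than what is published: fail loudly, like the workflow's `exit 1`.
  throw new Error(`Package version (${packageVersion}) is not greater than published version (${published})`);
}

Returning a structured decision here plays the role of the SHOULD_PUBLISH flag the workflow writes to GITHUB_ENV for the later steps.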
@@ -39,16 +84,36 @@
           bash ./scripts/copy_circuits.sh
+        working-directory: packages/sdk
 
-      - name: Get current version and set new version
-        run: |
-          CURRENT_VERSION=$(npm view @0xbow/privacy-pools-core-sdk | grep latest | cut -d' ' -f2)
-          IFS='.' read -ra VERSION_PARTS <<< "$CURRENT_VERSION"
-          PATCH_VERSION=$((VERSION_PARTS[2] + 1))
-          NEW_VERSION="${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.$PATCH_VERSION"
-          echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV
-          yarn version --new-version $NEW_VERSION --no-git-tag-version
       - name: Run tests
+        if: env.SHOULD_PUBLISH == 'true'
         run: yarn test
+        working-directory: packages/sdk
 
-      - name: Publish canary
+      - name: Publish to npm
+        if: env.SHOULD_PUBLISH == 'true'
         run: npm publish --access public --tag latest
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+        working-directory: packages/sdk
+
+      - name: Release Summary
+        if: env.SHOULD_PUBLISH == 'true'
+        run: |
+          echo "Successfully published @0xbow/privacy-pools-core-sdk@${{ env.PACKAGE_VERSION }}"
+          echo "Package URL: https://www.npmjs.com/package/@0xbow/privacy-pools-core-sdk/v/${{ env.PACKAGE_VERSION }}"
+          if [ "${{ env.NPM_VERSION }}" != "none" ]; then
+            echo "Version bump: ${{ env.NPM_VERSION }} → ${{ env.PACKAGE_VERSION }}"
+          else
+            echo "First release of version ${{ env.PACKAGE_VERSION }}"
+          fi
+
+      - name: No Release Summary
+        if: env.SHOULD_PUBLISH == 'false'
+        run: |
+          echo "No release performed"
+          echo "Current package.json version: ${{ env.PACKAGE_VERSION }}"
+          if [ "${{ env.NPM_VERSION }}" != "none" ]; then
+            echo "Published npm version: ${{ env.NPM_VERSION }}"
+            echo "To release a new version, bump the version in packages/sdk/package.json"
+          fi
+          echo "Workflow completed successfully without publishing"
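
The step removed above was the old canary flow: read whatever npm currently tags as latest, bump the patch segment, and publish that. A rough TypeScript equivalent of just the version arithmetic in those removed shell lines (the npm view and yarn version calls are left out):

// Sketch of the removed auto-bump: take the version currently tagged `latest`
// on npm and increment its patch segment, e.g. "1.0.1" -> "1.0.2".
function bumpPatch(currentVersion: string): string {
  const [major, minor, patch] = currentVersion.split(".").map(Number);
  return `${major}.${minor}.${patch + 1}`;
}

// The canary flow then ran `yarn version --new-version <result> --no-git-tag-version`.
console.log(bumpPatch("1.0.1")); // "1.0.2"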
package.json
@@ -1,6 +1,6 @@
 {
   "name": "privacy-pool-core",
-  "version": "1.1.0",
+  "version": "1.1.1",
   "description": "Core repository for the Privacy Pool protocol",
   "repository": {
     "type": "git",
packages/sdk/CHANGELOG.md
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [1.0.2] - 2025-09-02
+
+### Fixed
+
+- Fixed issue with incorrect deposits decryption
+- Fixed duplicated precommitments collision
+
 ## [1.0.1] - 2025-07-31
 
 ### Fixed
packages/sdk/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@0xbow/privacy-pools-core-sdk",
-  "version": "1.0.1",
+  "version": "1.0.2",
   "description": "Typescript SDK for the Privacy Pool protocol",
   "repository": "https://github.com/0xbow-io/privacy-pools-core",
   "license": "Apache-2.0",
packages/sdk/src/core/account.service.ts
@@ -488,7 +488,12 @@ export class AccountService {
 
     const depositMap = new Map<Hash, DepositEvent>();
     for (const event of depositEvents) {
-      depositMap.set(event.precommitment, event);
+      const existingEvent = depositMap.get(event.precommitment);
+
+      // If no existing event, or current event is older (earlier block), use current event
+      if (!existingEvent || event.blockNumber < existingEvent.blockNumber) {
+        depositMap.set(event.precommitment, event);
+      }
     }
 
     return depositMap;
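
The hunk above deduplicates deposit events by precommitment, keeping the event from the earliest block rather than whichever event the iteration happened to see last. A self-contained sketch of that selection rule, with simplified stand-in types instead of the SDK's own Hash and DepositEvent:

// Simplified stand-ins for the SDK's Hash and DepositEvent types.
type Precommitment = bigint;

interface SimpleDepositEvent {
  precommitment: Precommitment;
  blockNumber: bigint;
}

// Keep only the earliest event (lowest block number) seen for each precommitment,
// so a duplicated precommitment at a later block no longer overwrites the original deposit.
function dedupeByEarliestBlock(
  events: SimpleDepositEvent[],
): Map<Precommitment, SimpleDepositEvent> {
  const byPrecommitment = new Map<Precommitment, SimpleDepositEvent>();
  for (const event of events) {
    const existing = byPrecommitment.get(event.precommitment);
    if (!existing || event.blockNumber < existing.blockNumber) {
      byPrecommitment.set(event.precommitment, event);
    }
  }
  return byPrecommitment;
}

// The later duplicate (block 2500) is ignored in favour of the original deposit at block 2000.
const deduped = dedupeByEarliestBlock([
  { precommitment: 1n, blockNumber: 2500n },
  { precommitment: 1n, blockNumber: 2000n },
]);
console.log(deduped.get(1n)?.blockNumber); // 2000n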
@@ -621,7 +626,12 @@
     scope: Hash,
     depositEvents: Map<Hash, DepositEvent>
   ): void {
-    for (let index = BigInt(0); index < depositEvents.size; index++) {
+    const MAX_CONSECUTIVE_MISSES = 10; // Large enough to avoid tx failures
+
+    const foundIndices = new Set<bigint>();
+    let consecutiveMisses = 0;
+
+    for (let index = BigInt(0); ; index++) {
       // Generate nullifier, secret, and precommitment for this index
       const { nullifier, secret, precommitment } = this.createDepositSecrets(
         scope,
@@ -632,9 +642,18 @@
       const event = depositEvents.get(precommitment);
 
       if (!event) {
-        break; // No more deposits found, exit the loop
+        consecutiveMisses++;
+        if (consecutiveMisses >= MAX_CONSECUTIVE_MISSES) {
+          break;
+        }
+        continue;
       }
 
+      // Can reset counter in case if user had any tx failures for
+      // newer deposits
+      consecutiveMisses = 0;
+      foundIndices.add(index);
+
       // Create a new pool account for this deposit
       this.addPoolAccount(
         scope,
@@ -645,6 +664,8 @@
         event.blockNumber,
         event.transactionHash
       );
+
+      this.logger.debug(`Found deposit at index ${index} for scope ${scope}`);
     }
   }
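
Taken together, these hunks replace the old "stop at the first missing index" loop with a bounded scan: indices are tried in order, the precommitment derived for each index is looked up in the deposit map, up to ten consecutive misses are tolerated (so deposits whose transactions failed on-chain do not hide later ones), and the miss counter resets on every hit. A standalone sketch of that scan, where derivePrecommitment is a hypothetical stand-in for createDepositSecrets(scope, index).precommitment:

const MAX_CONSECUTIVE_MISSES = 10;

// Hypothetical stand-in for createDepositSecrets(scope, index).precommitment.
type DerivePrecommitment = (index: bigint) => bigint;

// Returns the indices at which deposits were found, in ascending order.
function scanDepositIndices(
  depositPrecommitments: Set<bigint>,
  derivePrecommitment: DerivePrecommitment,
): bigint[] {
  const foundIndices: bigint[] = [];
  let consecutiveMisses = 0;

  for (let index = 0n; ; index++) {
    const precommitment = derivePrecommitment(index);

    if (!depositPrecommitments.has(precommitment)) {
      consecutiveMisses++;
      if (consecutiveMisses >= MAX_CONSECUTIVE_MISSES) break;
      continue;
    }

    // A hit resets the miss counter, so gaps shorter than the limit are tolerated.
    consecutiveMisses = 0;
    foundIndices.push(index);
  }

  return foundIndices;
}

// With deposits at indices 0, 1, 5 and 6, all four are found; a gap of ten or more ends the scan.
const precommitments = new Set([0n, 1n, 5n, 6n].map((i) => i * i + 7n));
console.log(scanDepositIndices(precommitments, (i) => i * i + 7n)); // [0n, 1n, 5n, 6n]

This is exactly the behaviour the new AccountService tests below exercise: short gaps are skipped over, and a gap of ten or more consecutive missing indices ends recovery.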
@@ -5,7 +5,12 @@ import * as snarkjs from "snarkjs";
 import { Commitment, Hash, Secret } from "../../src/types/commitment.js";
 import { LeanIMTMerkleProof } from "@zk-kit/lean-imt";
 import { ProofError } from "../../src/errors/base.error.js";
-import { AccountCommitment } from "../../src/types/account.js";
+import { AccountCommitment, PoolInfo } from "../../src/types/account.js";
+import { AccountService } from "../../src/core/account.service.js";
+import { DataService } from "../../src/core/data.service.js";
+import { DepositEvent } from "../../src/types/events.js";
+import { Address, Hex } from "viem";
+import { english, generateMnemonic } from "viem/accounts";
 
 vi.mock("snarkjs");
 vi.mock("viem", async (importOriginal) => {
@@ -283,3 +288,312 @@ describe("PrivacyPoolSDK", () => {
     });
   });
 });
+
+describe("AccountService", () => {
+  // Test constants
+  const TEST_MNEMONIC = generateMnemonic(english);
+  const TEST_POOL: PoolInfo = {
+    chainId: 1,
+    address: "0x8Fac8db5cae9C29e9c80c40e8CeDC47EEfe3874E" as Address,
+    scope: BigInt("123456789") as Hash,
+    deploymentBlock: 1000n,
+  };
+
+  let dataService: DataService;
+  let accountService: AccountService;
+
+  // Helper function to create mock transaction hashes
+  function mockTxHash(index: number): Hex {
+    const paddedIndex = index.toString(16).padStart(64, "0");
+    return `0x${paddedIndex}` as Hex;
+  }
+
+  // Helper function to create deposit events with all required fields
+  function createDepositEvent(
+    value: bigint,
+    label: Hash,
+    precommitment: Hash,
+    blockNumber: bigint,
+    txHash: Hex
+  ): DepositEvent {
+    return {
+      depositor: "0x1234567890123456789012345678901234567890" as Address,
+      value,
+      label,
+      commitment: BigInt(123) as Hash,
+      precommitment,
+      blockNumber,
+      transactionHash: txHash,
+    };
+  }
+
+  beforeEach(() => {
+    dataService = {
+      getDeposits: vi.fn(async () => []),
+      getWithdrawals: vi.fn(async () => []),
+      getRagequits: vi.fn(async () => []),
+    } as unknown as DataService;
+
+    accountService = new AccountService(dataService, {
+      mnemonic: TEST_MNEMONIC,
+    });
+  });
+
+  afterEach(() => {
+    vi.clearAllMocks();
+  });
+
+  describe("_processDepositEvents", () => {
+    it("should process consecutive deposits starting from index 0", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      // Create 3 consecutive deposits at indices 0, 1, 2
+      for (let i = 0; i < 3; i++) {
+        const { precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      // Verify all 3 accounts were created
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(3);
+
+      // Verify account details
+      for (let i = 0; i < 3; i++) {
+        const account = accounts?.[i];
+        expect(account?.deposit.value).toBe(BigInt(1000 + i));
+        expect(account?.deposit.label).toBe(BigInt(100 + i));
+        expect(account?.deposit.blockNumber).toBe(BigInt(2000 + i));
+        expect(account?.deposit.txHash).toBe(mockTxHash(i));
+      }
+    });
+
+    it("should handle gaps in deposit indices with consecutive misses limit", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      // Create deposits at indices 0, 1, 5, 6 (gap at 2, 3, 4)
+      const indices = [0, 1, 5, 6];
+      for (const i of indices) {
+        const { precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(4); // All 4 deposits should be found
+
+      // Verify the correct deposits were processed
+      const values = accounts?.map(acc => acc.deposit.value) ?? [];
+      expect(values).toEqual([BigInt(1000), BigInt(1001), BigInt(1005), BigInt(1006)]);
+    });
+
+    it("should stop after 10 consecutive misses", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      // Create deposits at indices 0, 1, then a large gap, then 15
+      const indices = [0, 1, 15];
+      for (const i of indices) {
+        const { precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      // Should only find deposits at indices 0, 1 and stop due to consecutive misses
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(2); // Only first 2 deposits found
+
+      const values = accounts?.map(acc => acc.deposit.value) ?? [];
+      expect(values).toEqual([BigInt(1000), BigInt(1001)]);
+    });
+
+    it("should reset consecutive misses counter when a deposit is found", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      const indices = [0, 5, 10, 20];
+      for (const i of indices) {
+        const { precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      // All deposits should be found because gaps are within the consecutive misses limit
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(4);
+
+      const values = accounts?.map(acc => acc.deposit.value) ?? [];
+      expect(values).toEqual([BigInt(1000), BigInt(1005), BigInt(1010), BigInt(1020)]);
+    });
+
+    it("should handle empty deposit events", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      // No accounts should be created
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeUndefined();
+    });
+
+    it("should handle deposits with large gaps that exceed consecutive misses limit", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      const indices = [0, 1, 2, 20];
+      for (const i of indices) {
+        const { precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(3);
+
+      const values = accounts?.map(acc => acc.deposit.value) ?? [];
+      expect(values).toEqual([BigInt(1000), BigInt(1001), BigInt(1002)]);
+    });
+
+    it("should track found indices correctly", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      // Create non-consecutive deposits
+      const indices = [0, 2, 4, 6];
+      for (const i of indices) {
+        const { precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      // All should be found since gaps are small
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(4);
+
+      // Verify deposits are in the correct order (by index)
+      const values = accounts?.map(acc => acc.deposit.value) ?? [];
+      expect(values).toEqual([BigInt(1000), BigInt(1002), BigInt(1004), BigInt(1006)]);
+    });
+
+    it("should handle transaction failure scenarios with gaps", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      const indices = [0, 1, 4, 5];
+      for (const i of indices) {
+        const { precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      // All deposits should be found (gap of 2 is within limit)
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(4);
+
+      const values = accounts?.map(acc => acc.deposit.value) ?? [];
+      expect(values).toEqual([BigInt(1000), BigInt(1001), BigInt(1004), BigInt(1005)]);
+    });
+
+    it("should generate correct nullifier and secret for each deposit", () => {
+      const scope = TEST_POOL.scope;
+      const depositEvents = new Map<Hash, DepositEvent>();
+
+      // Create 2 deposits
+      const indices = [0, 1];
+      const expectedSecrets: { nullifier: Secret; secret: Secret }[] = [];
+
+      for (const i of indices) {
+        const { nullifier, secret, precommitment } = accountService.createDepositSecrets(scope, BigInt(i));
+        expectedSecrets.push({ nullifier, secret });
+
+        const event = createDepositEvent(
+          BigInt(1000 + i),
+          BigInt(100 + i) as Hash,
+          precommitment,
+          BigInt(2000 + i),
+          mockTxHash(i)
+        );
+        depositEvents.set(precommitment, event);
+      }
+
+      (accountService as unknown as { _processDepositEvents: (scope: Hash, events: Map<Hash, DepositEvent>) => void })._processDepositEvents(scope, depositEvents);
+
+      const accounts = accountService.account.poolAccounts.get(scope);
+      expect(accounts).toBeDefined();
+      expect(accounts?.length).toBe(2);
+
+      // Verify each account has the correct nullifier and secret
+      for (let i = 0; i < 2; i++) {
+        const account = accounts?.[i];
+        expect(account?.deposit.nullifier).toBe(expectedSecrets[i]?.nullifier);
+        expect(account?.deposit.secret).toBe(expectedSecrets[i]?.secret);
+      }
+    });
+  });
+});