test(signerStore): improve coverage and documentation (#210)

* docs: update documentation, rename ID to Id for consistent casing

* docs: flag all missing tests and docs

* chore: add words to spellcheck

* refactor: rename IDRegistry to IdRegistry

* docs: update more method documentation

* test(signerStore): add coverage for merge

* fix(signerStore): throw on mismatching blockHashes, compare blockNums correctly

* test: coverage for side effects of mergeIdRegistry

* test: add tests + docs

* test: add coverage for bytesDecrement

* refactor(reactionStore): use bytesIncrement/bytesDecrement
This commit is contained in:
Varun Srinivasan
2022-11-04 21:05:14 -07:00
committed by GitHub
parent d10b1c853d
commit a0583efb2f
16 changed files with 951 additions and 614 deletions

View File

@@ -19,5 +19,9 @@
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
},
"cSpell.words": ["farcaster"]
"cSpell.words": [
"farcaster",
"flatbuffer",
"flatbuffers"
]
}

View File

@@ -53,7 +53,7 @@ beforeAll(async () => {
custodyWallet = Wallet.createRandom();
custodyAddress = utils.arrayify(custodyWallet.address);
custodyEvent = new ContractEventModel(
await Factories.IDRegistryEvent.create({ fid: Array.from(fid), to: Array.from(custodyAddress) })
await Factories.IdRegistryEvent.create({ fid: Array.from(fid), to: Array.from(custodyAddress) })
);
signer = await generateEd25519KeyPair();
@@ -111,7 +111,7 @@ beforeAll(async () => {
describe('mergeIdRegistryEvent', () => {
test('succeeds', async () => {
await expect(engine.mergeIdRegistryEvent(custodyEvent)).resolves.toEqual(undefined);
await expect(signerStore.getIDRegistryEvent(fid)).resolves.toEqual(custodyEvent);
await expect(signerStore.getIdRegistryEvent(fid)).resolves.toEqual(custodyEvent);
});
});

View File

@@ -62,10 +62,10 @@ class Engine {
async mergeIdRegistryEvent(event: ContractEventModel): Promise<void> {
if (
event.type() === ContractEventType.IDRegistryRegister ||
event.type() === ContractEventType.IDRegistryTransfer
event.type() === ContractEventType.IdRegistryRegister ||
event.type() === ContractEventType.IdRegistryTransfer
) {
return this._signerStore.mergeIDRegistryEvent(event);
return this._signerStore.mergeIdRegistryEvent(event);
} else {
throw new BadRequestError('invalid event type');
}

View File

@@ -9,7 +9,7 @@ const fid = Factories.FID.build();
let model: ContractEventModel;
beforeAll(async () => {
const idRegistryEvent = await Factories.IDRegistryEvent.create({ fid: Array.from(fid) });
const idRegistryEvent = await Factories.IdRegistryEvent.create({ fid: Array.from(fid) });
model = new ContractEventModel(idRegistryEvent);
});

View File

@@ -4,6 +4,7 @@ import { UserPostfix } from '~/storage/flatbuffers/types';
import { ContractEvent, ContractEventType } from '~/utils/generated/contract_event_generated';
import MessageModel from './messageModel';
/** ContractEventModel provides helpers to read and write Flatbuffers ContractEvents from RocksDB */
export default class ContractEventModel {
public event: ContractEvent;
@@ -18,7 +19,7 @@ export default class ContractEventModel {
/** <user prefix byte, fid, ID Registry event prefix byte> */
static primaryKey(fid: Uint8Array): Buffer {
return Buffer.concat([MessageModel.userKey(fid), Buffer.from([UserPostfix.IDRegistryEvent])]);
return Buffer.concat([MessageModel.userKey(fid), Buffer.from([UserPostfix.IdRegistryEvent])]);
}
static async get<T extends ContractEventModel>(db: RocksDB, fid: Uint8Array): Promise<T> {

View File

@@ -59,7 +59,7 @@ export enum UserPostfix {
/* Used to index a verification in the remove set */
VerificationRemoves = 13,
/* Used to store an id registry event */
IDRegistryEvent = 14,
IdRegistryEvent = 14,
/* Used to store a signer */
SignerMessage = 15,
/* Used to index a signer in the add set */

View File

@@ -1,5 +1,6 @@
import {
bytesCompare,
bytesDecrement,
bytesIncrement,
FARCASTER_EPOCH,
fromFarcasterTime,
@@ -19,7 +20,7 @@ describe('bytesCompare', () => {
[new Uint8Array([1, 0, 0, 1, 0]), new Uint8Array([1, 0, 0, 2, 0]), -1],
];
for (const [a, b, result] of cases) {
test(`returns bytewise order for two byte arrays: ${a}, ${b}`, () => {
test(`returns byte-wise order for two byte arrays: ${a}, ${b}`, () => {
expect(bytesCompare(a, b)).toEqual(result);
});
}
@@ -44,6 +45,31 @@ describe('bytesIncrement', () => {
}
});
describe('bytesDecrement', () => {
const passingCases: [Uint8Array, Uint8Array][] = [
[new Uint8Array([1]), new Uint8Array([0])],
[new Uint8Array([1, 2]), new Uint8Array([1, 1])],
[new Uint8Array([1, 0]), new Uint8Array([0, 255])],
[new Uint8Array([1, 0, 1, 0]), new Uint8Array([1, 0, 0, 255])],
[new Uint8Array([0, 0, 2]), new Uint8Array([0, 0, 1])],
[new Uint8Array([1, 0, 0, 0]), new Uint8Array([0, 255, 255, 255])],
];
const failingCases: [Uint8Array][] = [[new Uint8Array([0])], [new Uint8Array([0, 0])]];
for (const [input, output] of passingCases) {
test(`decrements byte array: ${input}`, () => {
expect(bytesDecrement(input)).toEqual(output);
});
}
for (const [input] of failingCases) {
test(`should when decrementing byte array: ${input}`, () => {
expect(() => bytesDecrement(input)).toThrow(BadRequestError);
});
}
});
describe('fromFarcasterTime', () => {
test('returns seconds since 01/01/2022', () => {
const time = Date.now();

View File

@@ -36,6 +36,25 @@ export const bytesIncrement = (bytes: Uint8Array): Uint8Array => {
return new Uint8Array([1, ...bytes]);
};
/**
 * Decrements a big-endian byte array by one, borrowing from higher-order
 * bytes as needed (e.g. [1, 0] -> [0, 255]).
 *
 * NOTE: mutates `bytes` in place and returns the same array; callers that
 * need the original preserved should pass a copy (e.g. `hash.slice()`).
 *
 * @param bytes - big-endian byte array to decrement
 * @returns the same (mutated) array, or a fresh empty copy for empty input
 * @throws BadRequestError when the value is zero and cannot be decremented
 */
export const bytesDecrement = (bytes: Uint8Array): Uint8Array => {
  for (let pos = bytes.length - 1; pos >= 0; pos--) {
    const current = bytes[pos];
    if (current > 0) {
      bytes[pos] = current - 1;
      return bytes;
    }
    if (pos === 0) {
      // Every byte was zero: nothing left to borrow from.
      throw new BadRequestError('Cannot decrement zero');
    }
    // Byte underflows to 255; continue borrowing from the next-higher byte.
    bytes[pos] = 255;
  }
  // Only reachable for a zero-length input: return an (empty) copy.
  return new Uint8Array([...bytes]);
};
export const FARCASTER_EPOCH = 1640995200000; // January 1, 2022 UTC
export const getFarcasterTime = (): number => {
return toFarcasterTime(Date.now());

View File

@@ -5,6 +5,7 @@ import { BadRequestError, NotFoundError } from '~/utils/errors';
import { ReactionAddModel, ReactionRemoveModel, UserPostfix } from '~/storage/flatbuffers/types';
import ReactionStore from '~/storage/sets/flatbuffers/reactionStore';
import { MessageType, ReactionType } from '~/utils/generated/message_generated';
import { bytesDecrement, bytesIncrement } from '~/storage/flatbuffers/utils';
const db = jestBinaryRocksDB('flatbuffers.reactionStore.test');
const set = new ReactionStore(db);
@@ -146,37 +147,37 @@ describe('getReactionRemove', () => {
});
});
describe('getReactionAddsByFid', () => {
describe('getReactionAddsByUser', () => {
test('returns reactionAdds if they exist', async () => {
await set.merge(reactionAdd);
await set.merge(reactionAddRecast);
await expect(set.getReactionAddsByFid(fid)).resolves.toEqual([reactionAdd, reactionAddRecast]);
await expect(set.getReactionAddsByUser(fid)).resolves.toEqual([reactionAdd, reactionAddRecast]);
});
test('returns empty array if no ReactionAdd exists', async () => {
await expect(set.getReactionAddsByFid(fid)).resolves.toEqual([]);
await expect(set.getReactionAddsByUser(fid)).resolves.toEqual([]);
});
test('returns empty array if no ReactionAdd exists, even if ReactionRemove exists', async () => {
await set.merge(reactionRemove);
await expect(set.getReactionAddsByFid(fid)).resolves.toEqual([]);
await expect(set.getReactionAddsByUser(fid)).resolves.toEqual([]);
});
});
describe('getReactionRemovesByFid', () => {
describe('getReactionRemovesByUser', () => {
test('returns ReactionRemove if it exists', async () => {
await set.merge(reactionRemove);
await set.merge(reactionRemoveRecast);
await expect(set.getReactionRemovesByFid(fid)).resolves.toEqual([reactionRemove, reactionRemoveRecast]);
await expect(set.getReactionRemovesByUser(fid)).resolves.toEqual([reactionRemove, reactionRemoveRecast]);
});
test('returns empty array if no ReactionRemove exists', async () => {
await expect(set.getReactionRemovesByFid(fid)).resolves.toEqual([]);
await expect(set.getReactionRemovesByUser(fid)).resolves.toEqual([]);
});
test('returns empty array if no ReactionRemove exists, even if ReactionAdds exists', async () => {
await set.merge(reactionAdd);
await expect(set.getReactionRemovesByFid(fid)).resolves.toEqual([]);
await expect(set.getReactionRemovesByUser(fid)).resolves.toEqual([]);
});
});
@@ -268,7 +269,7 @@ describe('merge', () => {
await assertReactionAddWins(reactionAdd);
});
test('succeeds once, even if merged twice', async () => {
test('succeeds once, even if merged twice', async () => {
await expect(set.merge(reactionAdd)).resolves.toEqual(undefined);
await expect(set.merge(reactionAdd)).resolves.toEqual(undefined);
@@ -279,14 +280,14 @@ describe('merge', () => {
let reactionAddLater: ReactionAddModel;
beforeAll(async () => {
const reactionAddData = await Factories.ReactionAddData.create({
const addData = await Factories.ReactionAddData.create({
...reactionAdd.data.unpack(),
timestamp: reactionAdd.timestamp() + 1,
});
const reactionAddMessage = await Factories.Message.create({
data: Array.from(reactionAddData.bb?.bytes() ?? []),
const addMessage = await Factories.Message.create({
data: Array.from(addData.bb?.bytes() ?? []),
});
reactionAddLater = new MessageModel(reactionAddMessage) as ReactionAddModel;
reactionAddLater = new MessageModel(addMessage) as ReactionAddModel;
});
test('succeeds with a later timestamp', async () => {
@@ -310,19 +311,16 @@ describe('merge', () => {
let reactionAddLater: ReactionAddModel;
beforeAll(async () => {
const reactionAddData = await Factories.ReactionAddData.create({
const addData = await Factories.ReactionAddData.create({
...reactionAdd.data.unpack(),
});
const laterHash = Array.from(reactionAdd.hash());
laterHash[0] = 255;
const reactionAddMessage = await Factories.Message.create({
data: Array.from(reactionAddData.bb?.bytes() ?? []),
hash: Array.from(laterHash),
const addMessage = await Factories.Message.create({
data: Array.from(addData.bb?.bytes() ?? []),
hash: Array.from(bytesIncrement(reactionAdd.hash().slice())),
});
reactionAddLater = new MessageModel(reactionAddMessage) as ReactionAddModel;
reactionAddLater = new MessageModel(addMessage) as ReactionAddModel;
});
test('succeeds with a later hash', async () => {
@@ -378,13 +376,9 @@ describe('merge', () => {
timestamp: reactionAdd.timestamp(),
});
// Set the first byte of the hash to the max value to ensure it is later
const laterHash = Array.from(reactionAdd.hash());
laterHash[0] = 255;
const reactionRemoveMessage = await Factories.Message.create({
data: Array.from(reactionRemoveData.bb?.bytes() ?? []),
hash: Array.from(laterHash),
hash: Array.from(bytesIncrement(reactionAdd.hash().slice())),
});
const reactionRemoveLater = new MessageModel(reactionRemoveMessage) as ReactionRemoveModel;
@@ -402,13 +396,9 @@ describe('merge', () => {
timestamp: reactionAdd.timestamp(),
});
// Set the first byte of the hash to the min value to ensure it is earlier
const earlierHash = Array.from(reactionAdd.hash());
earlierHash[0] = 0;
const reactionRemoveMessage = await Factories.Message.create({
data: Array.from(reactionRemoveData.bb?.bytes() ?? []),
hash: Array.from(earlierHash),
hash: Array.from(bytesDecrement(reactionAdd.hash().slice())),
});
const reactionRemoveEarlier = new MessageModel(reactionRemoveMessage) as ReactionRemoveModel;
@@ -475,15 +465,12 @@ describe('merge', () => {
...reactionRemove.data.unpack(),
});
const laterHash = Array.from(reactionRemove.hash());
laterHash[0] = 255;
const reactionAddMessage = await Factories.Message.create({
const addMessage = await Factories.Message.create({
data: Array.from(reactionRemoveData.bb?.bytes() ?? []),
hash: Array.from(laterHash),
hash: Array.from(bytesIncrement(reactionRemove.hash().slice())),
});
reactionRemoveLater = new MessageModel(reactionAddMessage) as ReactionRemoveModel;
reactionRemoveLater = new MessageModel(addMessage) as ReactionRemoveModel;
});
test('succeeds with a later hash', async () => {
@@ -512,15 +499,15 @@ describe('merge', () => {
});
test('no-ops with an earlier timestamp', async () => {
const reactionAddData = await Factories.ReactionAddData.create({
const addData = await Factories.ReactionAddData.create({
...reactionRemove.data.unpack(),
timestamp: reactionRemove.timestamp() + 1,
type: MessageType.ReactionAdd,
});
const reactionAddMessage = await Factories.Message.create({
data: Array.from(reactionAddData.bb?.bytes() ?? []),
const addMessage = await Factories.Message.create({
data: Array.from(addData.bb?.bytes() ?? []),
});
const reactionAddLater = new MessageModel(reactionAddMessage) as ReactionAddModel;
const reactionAddLater = new MessageModel(addMessage) as ReactionAddModel;
await set.merge(reactionAddLater);
await expect(set.merge(reactionRemove)).resolves.toEqual(undefined);
await assertReactionAddWins(reactionAddLater);
@@ -530,20 +517,16 @@ describe('merge', () => {
describe('with conflicting ReactionAdd with identical timestamps', () => {
test('succeeds with an earlier hash', async () => {
const reactionAddData = await Factories.ReactionAddData.create({
const addData = await Factories.ReactionAddData.create({
...reactionRemove.data.unpack(),
type: MessageType.ReactionAdd,
});
// Set the first byte of the hash to the max value to ensure it is later
const laterHash = Array.from(reactionRemove.hash());
laterHash[0] = 255;
const reactionAddMessage = await Factories.Message.create({
data: Array.from(reactionAddData.bb?.bytes() ?? []),
hash: Array.from(laterHash),
const addMessage = await Factories.Message.create({
data: Array.from(addData.bb?.bytes() ?? []),
hash: Array.from(bytesIncrement(reactionRemove.hash().slice())),
});
const reactionAddLater = new MessageModel(reactionAddMessage) as ReactionAddModel;
const reactionAddLater = new MessageModel(addMessage) as ReactionAddModel;
await set.merge(reactionAddLater);
await expect(set.merge(reactionRemove)).resolves.toEqual(undefined);
@@ -553,20 +536,16 @@ describe('merge', () => {
});
test('succeeds with a later hash', async () => {
const reactionAddData = await Factories.ReactionAddData.create({
const removeData = await Factories.ReactionAddData.create({
...reactionRemove.data.unpack(),
});
// Set the first byte of the hash to the min value to ensure it is earlier
const earlierHash = Array.from(reactionRemove.hash());
earlierHash[0] = 0;
const reactionRemoveMessage = await Factories.Message.create({
data: Array.from(reactionAddData.bb?.bytes() ?? []),
hash: Array.from(earlierHash),
const removeMessage = await Factories.Message.create({
data: Array.from(removeData.bb?.bytes() ?? []),
hash: Array.from(bytesDecrement(reactionRemove.hash().slice())),
});
const reactionRemoveEarlier = new MessageModel(reactionRemoveMessage) as ReactionRemoveModel;
const reactionRemoveEarlier = new MessageModel(removeMessage) as ReactionRemoveModel;
await set.merge(reactionRemoveEarlier);
await expect(set.merge(reactionRemove)).resolves.toEqual(undefined);

View File

@@ -24,7 +24,7 @@ import { bytesCompare } from '~/storage/flatbuffers/utils';
* set (adds, removes) to make lookups easy when checking if a collision exists. An index is also
* build for the target to make it easy to fetch all reactions for a target.
*
* The key-value entries created by the Reaction Set are:
* The key-value entries created by the Reaction Store are:
*
* 1. fid:tsHash -> reaction message
* 2. fid:set:targetCastTsHash:reactionType -> fid:tsHash (Set Index)
@@ -160,7 +160,7 @@ class ReactionStore {
}
/** Finds all ReactionAdd Messages by iterating through the prefixes */
async getReactionAddsByFid(fid: Uint8Array): Promise<ReactionAddModel[]> {
async getReactionAddsByUser(fid: Uint8Array): Promise<ReactionAddModel[]> {
const prefix = ReactionStore.reactionAddsKey(fid);
const msgKeys: Buffer[] = [];
for await (const [, value] of this._db.iteratorByPrefix(prefix, { keys: false, valueAsBuffer: true })) {
@@ -170,7 +170,7 @@ class ReactionStore {
}
/** Finds all ReactionRemove Messages by iterating through the prefixes */
async getReactionRemovesByFid(fid: Uint8Array): Promise<ReactionRemoveModel[]> {
async getReactionRemovesByUser(fid: Uint8Array): Promise<ReactionRemoveModel[]> {
const prefix = ReactionStore.reactionRemovesKey(fid);
const messageKeys: Buffer[] = [];
for await (const [, value] of this._db.iteratorByPrefix(prefix, { keys: false, valueAsBuffer: true })) {
@@ -273,7 +273,7 @@ class ReactionStore {
}
/**
* Determines the RocksDB keys that must be modified to settle merge conflicts as a result of adding a Reaction to the Set.
* Determines the RocksDB keys that must be modified to settle merge conflicts as a result of adding a Reaction to the Store.
*
* @returns a RocksDB transaction if keys must be added or removed, undefined otherwise
*/

View File

@@ -1,7 +1,7 @@
import { faker } from '@faker-js/faker';
import Factories from '~/test/factories/flatbuffer';
import { jestBinaryRocksDB } from '~/storage/db/jestUtils';
import { BadRequestError, NotFoundError } from '~/utils/errors';
import { BadRequestError, NotFoundError, ValidationError } from '~/utils/errors';
import { EthereumSigner } from '~/types';
import { generateEd25519KeyPair, generateEthereumSigner } from '~/utils/crypto';
import { arrayify } from 'ethers/lib/utils';
@@ -9,8 +9,10 @@ import SignerStore from '~/storage/sets/flatbuffers/signerStore';
import ContractEventModel from '~/storage/flatbuffers/contractEventModel';
import { SignerAddModel, SignerRemoveModel, UserPostfix } from '~/storage/flatbuffers/types';
import MessageModel from '~/storage/flatbuffers/messageModel';
import { bytesDecrement, bytesIncrement } from '~/storage/flatbuffers/utils';
import { MessageType } from '~/utils/generated/message_generated';
const db = jestBinaryRocksDB('flatbuffers.signerSet.test');
const db = jestBinaryRocksDB('flatbuffers.signerStore.test');
const set = new SignerStore(db);
const fid = Factories.FID.build();
@@ -29,7 +31,7 @@ let signerRemove: SignerRemoveModel;
beforeAll(async () => {
custody1 = await generateEthereumSigner();
custody1Address = arrayify(custody1.signerKey);
const idRegistryEvent = await Factories.IDRegistryEvent.create({
const idRegistryEvent = await Factories.IdRegistryEvent.create({
fid: Array.from(fid),
to: Array.from(custody1Address),
});
@@ -44,6 +46,7 @@ beforeAll(async () => {
body: Factories.SignerBody.build({ signer: Array.from(signer) }),
fid: Array.from(fid),
});
const addMessage = await Factories.Message.create(
{ data: Array.from(addData.bb?.bytes() ?? []) },
{ transient: { wallet: custody1.wallet } }
@@ -62,20 +65,20 @@ beforeAll(async () => {
signerRemove = new MessageModel(removeMessage) as SignerRemoveModel;
});
describe('getIDRegistryEvent', () => {
test('returns contract event', async () => {
await set.mergeIDRegistryEvent(custody1Event);
await expect(set.getIDRegistryEvent(fid)).resolves.toEqual(custody1Event);
describe('getIdRegistryEvent', () => {
test('returns contract event if it exists', async () => {
await set.mergeIdRegistryEvent(custody1Event);
await expect(set.getIdRegistryEvent(fid)).resolves.toEqual(custody1Event);
});
test('fails if event is missing', async () => {
await expect(set.getIDRegistryEvent(fid)).rejects.toThrow(NotFoundError);
await expect(set.getIdRegistryEvent(fid)).rejects.toThrow(NotFoundError);
});
});
describe('getCustodyAddress', () => {
test('returns to from current IDRegistry event', async () => {
await set.mergeIDRegistryEvent(custody1Event);
test('returns to from current IdRegistry event', async () => {
await set.mergeIdRegistryEvent(custody1Event);
await expect(set.getCustodyAddress(fid)).resolves.toEqual(custody1Address);
});
@@ -84,109 +87,18 @@ describe('getCustodyAddress', () => {
});
});
describe('mergeIDRegistryEvent', () => {
test('succeeds', async () => {
await expect(set.mergeIDRegistryEvent(custody1Event)).resolves.toEqual(undefined);
await expect(set.getIDRegistryEvent(fid)).resolves.toEqual(custody1Event);
});
test('causes signers to become active', async () => {
await set.merge(signerAdd);
await expect(set.getSignerAdd(fid, signer)).rejects.toThrow(NotFoundError);
await expect(set.mergeIDRegistryEvent(custody1Event)).resolves.toEqual(undefined);
await expect(set.getSignerAdd(fid, signer)).resolves.toEqual(signerAdd);
});
describe('overwrites existing event', () => {
let newEvent: ContractEventModel;
beforeEach(async () => {
await set.mergeIDRegistryEvent(custody1Event);
});
afterEach(async () => {
await expect(set.mergeIDRegistryEvent(newEvent)).resolves.toEqual(undefined);
await expect(set.getIDRegistryEvent(fid)).resolves.toEqual(newEvent);
});
test('with a higher block number', async () => {
const idRegistryEvent = await Factories.IDRegistryEvent.create({
...custody1Event.event.unpack(),
to: Array.from(custody2Address),
blockNumber: custody1Event.blockNumber() + 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('with the same block number and a higher log index', async () => {
const idRegistryEvent = await Factories.IDRegistryEvent.create({
...custody1Event.event.unpack(),
to: Array.from(custody2Address),
logIndex: custody1Event.logIndex() + 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('with the same block number and log index and a higher transaction hash order', async () => {
const idRegistryEvent = await Factories.IDRegistryEvent.create({
...custody1Event.event.unpack(),
to: Array.from(custody2Address),
transactionHash: Array.from([...custody1Event.transactionHash(), 1]),
});
newEvent = new ContractEventModel(idRegistryEvent);
});
});
describe('no-ops', () => {
let newEvent: ContractEventModel;
beforeEach(async () => {
await set.mergeIDRegistryEvent(custody1Event);
});
afterEach(async () => {
await expect(set.mergeIDRegistryEvent(newEvent)).resolves.toEqual(undefined);
await expect(set.getIDRegistryEvent(fid)).resolves.toEqual(custody1Event);
});
test('when existing event has a higher block number', async () => {
const idRegistryEvent = await Factories.IDRegistryEvent.create({
...custody1Event.event.unpack(),
to: Array.from(custody2Address),
blockNumber: custody1Event.blockNumber() - 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('when existing event has the same block number and a higher log index', async () => {
const idRegistryEvent = await Factories.IDRegistryEvent.create({
...custody1Event.event.unpack(),
to: Array.from(custody2Address),
logIndex: custody1Event.logIndex() - 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('when existing event has the same block number and log index and a higher transaction hash order', async () => {
const idRegistryEvent = await Factories.IDRegistryEvent.create({
...custody1Event.event.unpack(),
to: Array.from(custody2Address),
transactionHash: Array.from([...custody1Event.transactionHash().slice(0, -1)]),
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('when event is a duplicate', async () => {
newEvent = custody1Event;
});
});
});
describe('getSignerAdd', () => {
test('fails if missing', async () => {
await expect(set.getSignerAdd(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
});
test('fails if incorrect custody address is passed in', async () => {
await set.merge(signerAdd);
const arbitraryCustodyAddress = arrayify(faker.datatype.hexadecimal({ length: 40 }));
await expect(set.getSignerAdd(fid, signer, arbitraryCustodyAddress)).rejects.toThrow(NotFoundError);
});
test('returns message', async () => {
await set.merge(signerAdd);
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(signerAdd);
@@ -194,7 +106,7 @@ describe('getSignerAdd', () => {
describe('without passing custodyAddress', () => {
test('defaults to current custodyAddress', async () => {
await set.mergeIDRegistryEvent(custody1Event);
await set.mergeIdRegistryEvent(custody1Event);
await set.merge(signerAdd);
await expect(set.getSignerAdd(fid, signer)).resolves.toEqual(signerAdd);
});
@@ -211,6 +123,13 @@ describe('getSignerRemove', () => {
await expect(set.getSignerRemove(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
});
test('fails if incorrect custody address is passed in', async () => {
await set.merge(signerRemove);
const arbitraryCustodyAddress = arrayify(faker.datatype.hexadecimal({ length: 40 }));
await expect(set.getSignerAdd(fid, signer, arbitraryCustodyAddress)).rejects.toThrow(NotFoundError);
});
test('returns message', async () => {
await set.merge(signerRemove);
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(signerRemove);
@@ -218,7 +137,7 @@ describe('getSignerRemove', () => {
describe('without passing custodyAddress', () => {
test('defaults to current custodyAddress', async () => {
await set.mergeIDRegistryEvent(custody1Event);
await set.mergeIdRegistryEvent(custody1Event);
await set.merge(signerRemove);
await expect(set.getSignerRemove(fid, signer)).resolves.toEqual(signerRemove);
});
@@ -248,7 +167,7 @@ describe('getSignerAddsByUser', () => {
describe('without passing custodyAddress', () => {
test('defaults to current custodyAddress', async () => {
await set.mergeIDRegistryEvent(custody1Event);
await set.mergeIdRegistryEvent(custody1Event);
await set.merge(signerAdd);
await expect(set.getSignerAddsByUser(fid)).resolves.toEqual([signerAdd]);
});
@@ -278,7 +197,7 @@ describe('getSignerRemovesByUser', () => {
describe('without passing custodyAddress', () => {
test('defaults to current custodyAddress', async () => {
await set.mergeIDRegistryEvent(custody1Event);
await set.mergeIdRegistryEvent(custody1Event);
await set.merge(signerRemove);
await expect(set.getSignerRemovesByUser(fid)).resolves.toEqual([signerRemove]);
});
@@ -290,42 +209,327 @@ describe('getSignerRemovesByUser', () => {
});
});
// TODO: write test cases for cyclical custody event transfers
describe('mergeIdRegistryEvent', () => {
test('succeeds and activates signers, if present', async () => {
await set.merge(signerAdd);
await expect(set.getSignerAdd(fid, signer)).rejects.toThrow(NotFoundError);
await expect(set.mergeIdRegistryEvent(custody1Event)).resolves.toEqual(undefined);
await expect(set.getIdRegistryEvent(fid)).resolves.toEqual(custody1Event);
await expect(set.getSignerAdd(fid, signer)).resolves.toEqual(signerAdd);
});
test('fails if events have the same blockNumber but different blockHashes', async () => {
const idRegistryEvent = await Factories.IdRegistryEvent.create({
...custody1Event.event.unpack(),
blockHash: Array.from(arrayify(faker.datatype.hexadecimal({ length: 64 }))),
});
const blockHashConflictEvent = new ContractEventModel(idRegistryEvent);
await set.mergeIdRegistryEvent(custody1Event);
await expect(set.mergeIdRegistryEvent(blockHashConflictEvent)).rejects.toThrow(ValidationError);
});
test('fails if events have the same blockNumber and logIndex but different transactionHashes', async () => {
const idRegistryEvent = await Factories.IdRegistryEvent.create({
...custody1Event.event.unpack(),
transactionHash: Array.from(arrayify(faker.datatype.hexadecimal({ length: 64 }))),
});
const txHashConflictEvent = new ContractEventModel(idRegistryEvent);
await set.mergeIdRegistryEvent(custody1Event);
await expect(set.mergeIdRegistryEvent(txHashConflictEvent)).rejects.toThrow(ValidationError);
});
describe('overwrites existing event', () => {
let newEvent: ContractEventModel;
beforeEach(async () => {
await set.mergeIdRegistryEvent(custody1Event);
await set.merge(signerAdd);
await expect(set.getSignerAdd(fid, signer)).resolves.toEqual(signerAdd);
});
afterEach(async () => {
await expect(set.mergeIdRegistryEvent(newEvent)).resolves.toEqual(undefined);
await expect(set.getIdRegistryEvent(fid)).resolves.toEqual(newEvent);
await expect(set.getSignerAdd(fid, signer)).rejects.toThrow(NotFoundError);
});
test('when it has a higher block number', async () => {
const idRegistryEvent = await Factories.IdRegistryEvent.create({
...custody1Event.event.unpack(),
transactionHash: Array.from(arrayify(faker.datatype.hexadecimal({ length: 64 }))),
to: Array.from(custody2Address),
blockNumber: custody1Event.blockNumber() + 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('when it has the same block number and a higher log index', async () => {
const idRegistryEvent = await Factories.IdRegistryEvent.create({
...custody1Event.event.unpack(),
transactionHash: Array.from(arrayify(faker.datatype.hexadecimal({ length: 64 }))),
to: Array.from(custody2Address),
logIndex: custody1Event.logIndex() + 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
});
describe('does not overwrite existing event', () => {
let newEvent: ContractEventModel;
beforeEach(async () => {
await set.mergeIdRegistryEvent(custody1Event);
await set.merge(signerAdd);
await expect(set.getSignerAdd(fid, signer)).resolves.toEqual(signerAdd);
});
afterEach(async () => {
await expect(set.mergeIdRegistryEvent(newEvent)).resolves.toEqual(undefined);
await expect(set.getIdRegistryEvent(fid)).resolves.toEqual(custody1Event);
await expect(set.getSignerAdd(fid, signer)).resolves.toEqual(signerAdd);
});
test('when it has a lower block number', async () => {
const idRegistryEvent = await Factories.IdRegistryEvent.create({
...custody1Event.event.unpack(),
transactionHash: Array.from(arrayify(faker.datatype.hexadecimal({ length: 64 }))),
to: Array.from(custody2Address),
blockNumber: custody1Event.blockNumber() - 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('when it has the same block number and a lower log index', async () => {
const idRegistryEvent = await Factories.IdRegistryEvent.create({
...custody1Event.event.unpack(),
to: Array.from(custody2Address),
logIndex: custody1Event.logIndex() - 1,
});
newEvent = new ContractEventModel(idRegistryEvent);
});
test('when is a duplicate', async () => {
newEvent = custody1Event;
});
});
});
describe('merge', () => {
const assertSignerExists = async (message: SignerAddModel | SignerRemoveModel) => {
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, message.tsHash())).resolves.toEqual(message);
};
const assertSignerDoesNotExist = async (message: SignerAddModel | SignerRemoveModel) => {
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, message.tsHash())).rejects.toThrow(NotFoundError);
};
const assertSignerAddWins = async (message: SignerAddModel) => {
await assertSignerExists(message);
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(message);
await expect(set.getSignerRemove(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
};
const assertSignerRemoveWins = async (message: SignerRemoveModel) => {
await assertSignerExists(message);
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(message);
await expect(set.getSignerAdd(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
};
test('fails with invalid message type', async () => {
const invalidData = await Factories.ReactionAddData.create({ fid: Array.from(fid) });
const message = await Factories.Message.create({ data: Array.from(invalidData.bb?.bytes() ?? []) });
await expect(set.merge(new MessageModel(message))).rejects.toThrow(BadRequestError);
});
describe('SignerRemove', () => {
describe('succeeds', () => {
beforeEach(async () => {
describe('SignerAdd', () => {
test('succeeds', async () => {
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await assertSignerAddWins(signerAdd);
});
test('succeeds once, even if merged twice', async () => {
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await assertSignerAddWins(signerAdd);
});
describe('with a conflicting SignerAdd with different timestamps', () => {
let signerAddLater: SignerAddModel;
beforeAll(async () => {
const addData = await Factories.ReactionAddData.create({
...signerAdd.data.unpack(),
timestamp: signerAdd.timestamp() + 1,
});
const addMessage = await Factories.Message.create(
{
data: Array.from(addData.bb?.bytes() ?? []),
},
{ transient: { wallet: custody1.wallet } }
);
signerAddLater = new MessageModel(addMessage) as SignerAddModel;
});
test('succeeds with a later timestamp', async () => {
await set.merge(signerAdd);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await expect(set.merge(signerAddLater)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(signerAdd);
await assertSignerAddWins(signerAddLater);
});
test('saves message', async () => {
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerRemove.tsHash())).resolves.toEqual(
signerRemove
);
});
test('no-ops with an earlier timestamp', async () => {
await set.merge(signerAddLater);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
test('saves signerRemoves index', async () => {
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(signerRemove);
});
test('deletes SignerAdd message', async () => {
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerAdd.tsHash())).rejects.toThrow(
NotFoundError
);
});
test('deletes signerAdds index', async () => {
await expect(set.getSignerAdd(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
await assertSignerDoesNotExist(signerAdd);
await assertSignerAddWins(signerAddLater);
});
});
describe('with conflicting SignerRemove', () => {
describe('with a conflicting SignerAdd with identical timestamps', () => {
let signerAddLater: SignerAddModel;
beforeAll(async () => {
const addData = await Factories.ReactionAddData.create({
...signerAdd.data.unpack(),
});
const addMessage = await Factories.Message.create(
{
data: Array.from(addData.bb?.bytes() ?? []),
// Makes a copy of the hash and increments it
hash: Array.from(bytesIncrement(signerAdd.hash().slice())),
},
{ transient: { wallet: custody1.wallet } }
);
signerAddLater = new MessageModel(addMessage) as SignerAddModel;
});
test('succeeds with a later hash', async () => {
await set.merge(signerAdd);
await expect(set.merge(signerAddLater)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(signerAdd);
await assertSignerAddWins(signerAddLater);
});
test('no-ops with an earlier hash', async () => {
await set.merge(signerAddLater);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(signerAdd);
await assertSignerAddWins(signerAddLater);
});
});
describe('with conflicting SignerRemove with different timestamps', () => {
test('succeeds with a later timestamp', async () => {
const removeData = await Factories.SignerRemoveData.create({
...signerRemove.data.unpack(),
timestamp: signerAdd.timestamp() - 1,
});
const removeMessage = await Factories.Message.create(
{
data: Array.from(removeData.bb?.bytes() ?? []),
},
{ transient: { wallet: custody1.wallet } }
);
const signerRemoveEarlier = new MessageModel(removeMessage) as SignerRemoveModel;
await set.merge(signerRemoveEarlier);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await assertSignerAddWins(signerAdd);
await assertSignerDoesNotExist(signerRemoveEarlier);
});
test('no-ops with an earlier timestamp', async () => {
await set.merge(signerRemove);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await assertSignerRemoveWins(signerRemove);
await assertSignerDoesNotExist(signerAdd);
});
});
describe('with conflicting SignerRemove with identical timestamps', () => {
test('no-ops if remove has a later hash', async () => {
const removeData = await Factories.SignerRemoveData.create({
...signerRemove.data.unpack(),
timestamp: signerAdd.timestamp(),
});
const removeMessage = await Factories.Message.create(
{
data: Array.from(removeData.bb?.bytes() ?? []),
// Makes a copy of the hash and increments it
hash: Array.from(bytesIncrement(signerAdd.hash().slice())),
},
{ transient: { wallet: custody1.wallet } }
);
const signerRemoveLater = new MessageModel(removeMessage) as SignerRemoveModel;
await set.merge(signerRemoveLater);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await assertSignerRemoveWins(signerRemoveLater);
await assertSignerDoesNotExist(signerAdd);
});
test('succeeds if remove has an earlier hash', async () => {
const removeData = await Factories.SignerRemoveData.create({
...signerRemove.data.unpack(),
timestamp: signerAdd.timestamp(),
});
const removeMessage = await Factories.Message.create(
{
data: Array.from(removeData.bb?.bytes() ?? []),
// TODO: investigate if this test is correct
hash: Array.from(bytesDecrement(signerAdd.hash().slice())),
},
{ transient: { wallet: custody1.wallet } }
);
const signerRemoveEarlier = new MessageModel(removeMessage) as SignerRemoveModel;
await set.merge(signerRemoveEarlier);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(signerAdd);
await assertSignerRemoveWins(signerRemoveEarlier);
});
});
});
describe('SignerRemove', () => {
test('succeeds', async () => {
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await assertSignerRemoveWins(signerRemove);
});
test('succeeds once, even if merged twice', async () => {
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await assertSignerRemoveWins(signerRemove);
});
describe('with a conflicting SignerRemove with different timestamps', () => {
let signerRemoveLater: SignerRemoveModel;
beforeAll(async () => {
@@ -345,162 +549,132 @@ describe('merge', () => {
test('succeeds with a later timestamp', async () => {
await set.merge(signerRemove);
await expect(set.merge(signerRemoveLater)).resolves.toEqual(undefined);
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(signerRemoveLater);
await expect(MessageModel.get(db, fid, UserPostfix.VerificationMessage, signerRemove.tsHash())).rejects.toThrow(
NotFoundError
);
await assertSignerDoesNotExist(signerRemove);
await assertSignerRemoveWins(signerRemoveLater);
});
test('no-ops with an earlier timestamp', async () => {
await set.merge(signerRemoveLater);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(signerRemoveLater);
await expect(MessageModel.get(db, fid, UserPostfix.VerificationMessage, signerRemove.tsHash())).rejects.toThrow(
NotFoundError
);
});
// TODO: same signer, different custody address
});
describe('with conflicting SignerAdd', () => {
let signerAddLater: SignerAddModel;
beforeAll(async () => {
const addData = await Factories.SignerAddData.create({
...signerAdd.data.unpack(),
timestamp: signerRemove.timestamp() + 1,
});
const addMessage = await Factories.Message.create(
{
data: Array.from(addData.bb?.bytes() ?? []),
},
{ transient: { wallet: custody1.wallet } }
);
signerAddLater = new MessageModel(addMessage) as SignerAddModel;
});
test('no-ops with an earlier timestamp', async () => {
await set.merge(signerAddLater);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(signerAddLater);
await expect(set.getSignerRemove(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerRemove.tsHash())).rejects.toThrow(
NotFoundError
);
});
test('succeeds with a later timestamp', async () => {
await set.merge(signerAdd);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(signerRemove);
await expect(set.getSignerAdd(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerAdd.tsHash())).rejects.toThrow(
NotFoundError
);
await assertSignerDoesNotExist(signerRemove);
await assertSignerRemoveWins(signerRemoveLater);
});
});
test('succeeds when SignerAdd does not exist', async () => {
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(signerRemove);
await expect(set.getSignerAdd(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
});
});
describe('SignerAdd', () => {
describe('succeeds', () => {
beforeEach(async () => {
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
});
test('saves message', async () => {
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerAdd.tsHash())).resolves.toEqual(
signerAdd
);
});
test('saves signerAdds index', async () => {
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(signerAdd);
});
test('no-ops when merged twice', async () => {
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(signerAdd);
});
});
describe('with conflicting SignerAdd', () => {
let signerAddLater: SignerAddModel;
beforeAll(async () => {
const addData = await Factories.SignerAddData.create({
...signerAdd.data.unpack(),
timestamp: signerAdd.timestamp() + 1,
});
const addMessage = await Factories.Message.create(
{
data: Array.from(addData.bb?.bytes() ?? []),
},
{ transient: { wallet: custody1.wallet } }
);
signerAddLater = new MessageModel(addMessage) as SignerAddModel;
});
test('succeeds with a later timestamp', async () => {
await set.merge(signerAdd);
await expect(set.merge(signerAddLater)).resolves.toEqual(undefined);
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(signerAddLater);
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerAdd.tsHash())).rejects.toThrow(
NotFoundError
);
});
test('no-ops with an earlier timestamp', async () => {
await set.merge(signerAddLater);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(signerAddLater);
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerAdd.tsHash())).rejects.toThrow(
NotFoundError
);
});
});
describe('with conflicting SignerRemove', () => {
let signerRemoveEarlier: SignerRemoveModel;
describe('with a conflicting SignerRemove with identical timestamps', () => {
let signerRemoveLater: SignerRemoveModel;
beforeAll(async () => {
const removeData = await Factories.SignerRemoveData.create({
...signerRemove.data.unpack(),
timestamp: signerAdd.timestamp() - 1,
});
const removeMessage = await Factories.Message.create(
{
data: Array.from(removeData.bb?.bytes() ?? []),
hash: Array.from(bytesIncrement(signerRemove.hash().slice())),
},
{ transient: { wallet: custody1.wallet } }
);
signerRemoveEarlier = new MessageModel(removeMessage) as SignerRemoveModel;
signerRemoveLater = new MessageModel(removeMessage) as SignerRemoveModel;
});
test('succeeds with a later hash', async () => {
await set.merge(signerRemove);
await expect(set.merge(signerRemoveLater)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(signerRemove);
await assertSignerRemoveWins(signerRemoveLater);
});
test('no-ops with an earlier hash', async () => {
await set.merge(signerRemoveLater);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(signerRemove);
await assertSignerRemoveWins(signerRemoveLater);
});
});
describe('with conflicting SignerAdd with different timestamps', () => {
test('succeeds with a later timestamp', async () => {
await set.merge(signerRemoveEarlier);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await expect(set.getSignerAdd(fid, signer, custody1Address)).resolves.toEqual(signerAdd);
await expect(set.getSignerRemove(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
await expect(
MessageModel.get(db, fid, UserPostfix.SignerMessage, signerRemoveEarlier.tsHash())
).rejects.toThrow(NotFoundError);
await set.merge(signerAdd);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await assertSignerRemoveWins(signerRemove);
await assertSignerDoesNotExist(signerAdd);
});
test('no-ops with an earlier timestamp', async () => {
await set.merge(signerRemove);
await expect(set.merge(signerAdd)).resolves.toEqual(undefined);
await expect(set.getSignerRemove(fid, signer, custody1Address)).resolves.toEqual(signerRemove);
await expect(set.getSignerAdd(fid, signer, custody1Address)).rejects.toThrow(NotFoundError);
await expect(MessageModel.get(db, fid, UserPostfix.SignerMessage, signerAdd.tsHash())).rejects.toThrow(
NotFoundError
const addData = await Factories.ReactionAddData.create({
...signerRemove.data.unpack(),
timestamp: signerRemove.timestamp() + 1,
type: MessageType.SignerAdd,
});
const reactionAddMessage = await Factories.Message.create(
{
data: Array.from(addData.bb?.bytes() ?? []),
},
{ transient: { wallet: custody1.wallet } }
);
const reactionAddLater = new MessageModel(reactionAddMessage) as SignerAddModel;
await set.merge(reactionAddLater);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await assertSignerAddWins(reactionAddLater);
await assertSignerDoesNotExist(signerRemove);
});
});
describe('with conflicting SignerAdd with identical timestamps', () => {
test('succeeds with an earlier hash', async () => {
const addData = await Factories.ReactionAddData.create({
...signerRemove.data.unpack(),
type: MessageType.SignerAdd,
});
const reactionAddMessage = await Factories.Message.create(
{
data: Array.from(addData.bb?.bytes() ?? []),
hash: Array.from(bytesIncrement(signerRemove.hash().slice())),
},
{ transient: { wallet: custody1.wallet } }
);
const reactionAddLater = new MessageModel(reactionAddMessage) as SignerAddModel;
await set.merge(reactionAddLater);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(reactionAddLater);
await assertSignerRemoveWins(signerRemove);
});
test('succeeds with a later hash', async () => {
const removeData = await Factories.ReactionAddData.create({
...signerRemove.data.unpack(),
});
const removeMessage = await Factories.Message.create(
{
data: Array.from(removeData.bb?.bytes() ?? []),
hash: Array.from(bytesDecrement(signerRemove.hash().slice())),
},
{ transient: { wallet: custody1.wallet } }
);
const signerRemoveEarlier = new MessageModel(removeMessage) as SignerRemoveModel;
await set.merge(signerRemoveEarlier);
await expect(set.merge(signerRemove)).resolves.toEqual(undefined);
await assertSignerDoesNotExist(signerRemoveEarlier);
await assertSignerRemoveWins(signerRemove);
});
});
});

View File

@@ -1,5 +1,5 @@
import RocksDB, { Transaction } from '~/storage/db/binaryrocksdb';
import { BadRequestError } from '~/utils/errors';
import { BadRequestError, ValidationError } from '~/utils/errors';
import MessageModel from '~/storage/flatbuffers/messageModel';
import { ResultAsync } from 'neverthrow';
import { SignerAddModel, UserPostfix, SignerRemoveModel } from '~/storage/flatbuffers/types';
@@ -8,6 +8,34 @@ import { bytesCompare } from '~/storage/flatbuffers/utils';
import { MessageType } from '~/utils/generated/message_generated';
import ContractEventModel from '~/storage/flatbuffers/contractEventModel';
/**
* SignerStore persists Signer Messages in RocksDB using a series of two-phase CRDT sets
* to guarantee eventual consistency.
*
* A Signer is an EdDSA key-pair that is authorized to sign Messages on behalf of a user. They can
* be added with a SignerAdd message that is signed by the user's custody address. Signers that are
* signed by the custody address that currently holds the fid are considered active. All other
* Farcaster Messages must be signed by an active signer.
*
 * Signers can be removed with a SignerRemove message signed by the user's custody address.
* Removing a signer also removes all messages signed by it, and should only be invoked if a
* compromise is suspected.
*
 * The SignerStore has a two-phase CRDT set for each custody address, which keeps track of
* signers. It also stores the current custody address as a single key in the database which can be
* used to look up the two-phase set that corresponds to the active signers. Conflicts between
* Signer messages are resolved with Last-Write-Wins + Remove-Wins rules as follows:
*
* 1. Highest timestamp wins
* 2. Remove wins over Adds
* 3. Highest lexicographic hash wins
*
* The key-value entries created by the Signer Store are:
*
* 1. fid:tsHash -> signer message
* 2. fid:set:custodyAddress:signerAddress -> fid:tsHash (Set Index)
* 3. fid:custodyAddress -> fid:tsHash (Custody Address)
*/
class SignerStore {
private _db: RocksDB;
@@ -15,59 +43,101 @@ class SignerStore {
this._db = db;
}
/** RocksDB key of the form <user prefix (1 byte), fid (32 bytes), signer removes key (1 byte), custody address, signer (variable bytes)> */
static signerRemovesKey(fid: Uint8Array, custodyAddress: Uint8Array, signer?: Uint8Array): Buffer {
return Buffer.concat([
MessageModel.userKey(fid),
Buffer.from([UserPostfix.SignerRemoves]),
Buffer.from(custodyAddress),
signer ? Buffer.from(signer) : new Uint8Array(),
]);
}
/** RocksDB key of the form <user prefix (1 byte), fid (32 bytes), signer adds key (1 byte), custody address, signer (variable bytes)> */
static signerAddsKey(fid: Uint8Array, custodyAddress: Uint8Array, signer?: Uint8Array): Buffer {
/**
* Generates a unique key used to store a SignerAdd message key in the SignerAdds set index
*
* @param fid farcaster id of the user who created the Signer
* @param custodyAddress the Ethereum address of the secp256k1 key-pair that signed the message
* @param signerPubKey the EdDSA public key of the signer
*
* @returns RocksDB key of the form <RootPrefix>:<fid>:<UserPostfix>:<custodyAddress?>:<signerPubKey?>
*/
static signerAddsKey(fid: Uint8Array, custodyAddress: Uint8Array, signerPubKey?: Uint8Array): Buffer {
return Buffer.concat([
MessageModel.userKey(fid),
Buffer.from([UserPostfix.SignerAdds]),
Buffer.from(custodyAddress),
signer ? Buffer.from(signer) : new Uint8Array(),
signerPubKey ? Buffer.from(signerPubKey) : new Uint8Array(),
]);
}
async getIDRegistryEvent(fid: Uint8Array): Promise<ContractEventModel> {
/**
* Generates a unique key used to store a SignerRemove message key in the SignerRemoves set index
*
* @param fid farcaster id of the user who created the Signer
* @param custodyAddress the Ethereum address of the secp256k1 key-pair that signed the message
* @param signerPubKey the EdDSA public key of the signer
*
* @returns RocksDB key of the form <RootPrefix>:<fid>:<UserPostfix>:<custodyAddress?>:<signerPubKey?>
*/
static signerRemovesKey(fid: Uint8Array, custodyAddress: Uint8Array, signerPubKey?: Uint8Array): Buffer {
return Buffer.concat([
MessageModel.userKey(fid),
Buffer.from([UserPostfix.SignerRemoves]),
Buffer.from(custodyAddress),
signerPubKey ? Buffer.from(signerPubKey) : new Uint8Array(),
]);
}
/** Returns the most recent event from the IdRegistry contract that affected the fid */
async getIdRegistryEvent(fid: Uint8Array): Promise<ContractEventModel> {
return ContractEventModel.get(this._db, fid);
}
/** Returns the custody address that currently owns an fid */
async getCustodyAddress(fid: Uint8Array): Promise<Uint8Array> {
const idRegistryEvent = await this.getIDRegistryEvent(fid);
const idRegistryEvent = await this.getIdRegistryEvent(fid);
return idRegistryEvent.to();
}
/** Look up SignerAdd message by fid, custody address, and signer */
async getSignerAdd(fid: Uint8Array, signer: Uint8Array, custodyAddress?: Uint8Array): Promise<SignerAddModel> {
//TODO: When implementing the Result type consider refactoring these methods into separate ones
// for active vs. all signers
/**
* Finds a SignerAdd Message by checking the adds-set's index for a user's custody address
*
* @param fid fid of the user who created the SignerAdd
* @param signerPubKey the EdDSA public key of the signer
* @param custodyAddress the Ethereum address that currently owns the Farcaster ID (default: current custody address)
* @returns the SignerAdd Model if it exists, throws NotFoundError otherwise
*/
async getSignerAdd(fid: Uint8Array, signerPubKey: Uint8Array, custodyAddress?: Uint8Array): Promise<SignerAddModel> {
if (!custodyAddress) {
// Will throw NotFoundError if custody address is missing
custodyAddress = await this.getCustodyAddress(fid);
}
const messageTsHash = await this._db.get(SignerStore.signerAddsKey(fid, custodyAddress, signer));
const messageTsHash = await this._db.get(SignerStore.signerAddsKey(fid, custodyAddress, signerPubKey));
return MessageModel.get<SignerAddModel>(this._db, fid, UserPostfix.SignerMessage, messageTsHash);
}
/** Look up SignerRemove message by fid, custody address, and signer */
/**
* Finds a SignerRemove Message by checking the remove-set's index for a user's custody address
*
* @param fid fid of the user who created the SignerRemove
* @param signer the EdDSA public key of the signer
* @param custodyAddress the Ethereum address that currently owns the Farcaster ID (default: current custody address)
* @returns the SignerRemove message if it exists, throws NotFoundError otherwise
*/
async getSignerRemove(fid: Uint8Array, signer: Uint8Array, custodyAddress?: Uint8Array): Promise<SignerRemoveModel> {
if (!custodyAddress) {
// Will throw NotFoundError if custody address is missing
custodyAddress = await this.getCustodyAddress(fid);
}
const messageTsHash = await this._db.get(SignerStore.signerRemovesKey(fid, custodyAddress, signer));
return MessageModel.get<SignerRemoveModel>(this._db, fid, UserPostfix.SignerMessage, messageTsHash);
}
/** Get all SignerAdd messages for an fid and custody address */
//TODO: When implementing the Result type consider refactoring these methods into separate ones
// for active vs. all signers
/**
* Finds all SignerAdd messages for a user's custody address
*
* @param fid fid of the user who created the signers
* @param custodyAddress the Ethereum address that currently owns the fid (default: current custody address)
 * @returns the SignerAdd messages if they exist, throws NotFoundError otherwise
*/
async getSignerAddsByUser(fid: Uint8Array, custodyAddress?: Uint8Array): Promise<SignerAddModel[]> {
if (!custodyAddress) {
// Will throw NotFoundError if custody address is missing
custodyAddress = await this.getCustodyAddress(fid);
}
const addsPrefix = SignerStore.signerAddsKey(fid, custodyAddress);
@@ -78,10 +148,15 @@ class SignerStore {
return MessageModel.getManyByUser<SignerAddModel>(this._db, fid, UserPostfix.SignerMessage, messageKeys);
}
/** Get all Signerremove messages for an fid and custody address */
/**
* Finds all SignerRemove Messages for a user's custody address
*
* @param fid fid of the user who created the signers
* @param custodyAddress the Ethereum address that currently owns the fid (default: current custody address)
 * @returns the SignerRemove messages if they exist, throws NotFoundError otherwise
*/
async getSignerRemovesByUser(fid: Uint8Array, custodyAddress?: Uint8Array): Promise<SignerRemoveModel[]> {
if (!custodyAddress) {
// Will throw NotFoundError if custody address is missing
custodyAddress = await this.getCustodyAddress(fid);
}
const removesPrefix = SignerStore.signerRemovesKey(fid, custodyAddress);
@@ -92,9 +167,13 @@ class SignerStore {
return MessageModel.getManyByUser<SignerRemoveModel>(this._db, fid, UserPostfix.SignerMessage, messageKeys);
}
// TODO: emit signer change events as a result of ID Registry events
async mergeIDRegistryEvent(event: ContractEventModel): Promise<void> {
const existingEvent = await ResultAsync.fromPromise(this.getIDRegistryEvent(event.fid()), () => undefined);
/**
* Merges a ContractEvent into the SignerStore, storing the causally latest event at the key:
* <RootPrefix:User><fid><UserPostfix:IdRegistryEvent>
*/
async mergeIdRegistryEvent(event: ContractEventModel): Promise<void> {
// TODO: emit signer change events as a result of ID Registry events
const existingEvent = await ResultAsync.fromPromise(this.getIdRegistryEvent(event.fid()), () => undefined);
if (existingEvent.isOk() && this.eventCompare(existingEvent.value, event) >= 0) {
return undefined;
}
@@ -104,7 +183,7 @@ class SignerStore {
return this._db.commit(txn);
}
/** Merge a SignerAdd or SignerRemove message into the set */
/** Merges a SignerAdd or SignerRemove message into the SignerStore */
async merge(message: MessageModel): Promise<void> {
if (isSignerRemove(message)) {
return this.mergeRemove(message);
@@ -125,9 +204,15 @@ class SignerStore {
// Compare blockNumber
if (a.blockNumber() < b.blockNumber()) {
return -1;
} else if (a.blockNumber > b.blockNumber) {
} else if (a.blockNumber() > b.blockNumber()) {
return 1;
}
// Cannot happen unless we do not filter out uncle blocks correctly upstream
if (bytesCompare(a.blockHash(), b.blockHash()) !== 0) {
throw new ValidationError('block hash mismatch');
}
// Compare logIndex
if (a.logIndex() < b.logIndex()) {
return -1;
@@ -135,8 +220,12 @@ class SignerStore {
return 1;
}
// Compare transactionHash (lexicographical order)
return bytesCompare(a.transactionHash(), b.transactionHash());
// Cannot happen unless we pass in malformed data
if (bytesCompare(a.transactionHash(), b.transactionHash()) !== 0) {
throw new ValidationError('tx hash mismatch');
}
return 0;
}
private async mergeAdd(message: SignerAddModel): Promise<void> {
@@ -165,15 +254,16 @@ class SignerStore {
return this._db.commit(txn);
}
private SignerMessageCompare(
private signerMessageCompare(
aType: MessageType,
aTsHash: Uint8Array,
bType: MessageType,
bTsHash: Uint8Array
): number {
const tsHashOrder = bytesCompare(aTsHash, bTsHash);
if (tsHashOrder !== 0) {
return tsHashOrder;
// Compare timestamps (first 4 bytes of tsHash) to enforce Last-Write-Wins
const timestampOrder = bytesCompare(aTsHash.subarray(0, 4), bTsHash.subarray(0, 4));
if (timestampOrder !== 0) {
return timestampOrder;
}
if (aType === MessageType.SignerRemove && bType === MessageType.SignerAdd) {
@@ -182,9 +272,15 @@ class SignerStore {
return -1;
}
return 0;
// Compare hashes (last 4 bytes of tsHash) to break ties between messages of the same type and timestamp
return bytesCompare(aTsHash.subarray(4), bTsHash.subarray(4));
}
/**
* Determines the RocksDB keys that must be modified to settle merge conflicts as a result of adding a Signer to the Store.
*
* @returns a RocksDB transaction if keys must be added or removed, undefined otherwise
*/
private async resolveMergeConflicts(
txn: Transaction,
message: SignerAddModel | SignerRemoveModel
@@ -194,7 +290,7 @@ class SignerStore {
throw new BadRequestError('signer is required');
}
// Look up the remove tsHash for this custody adddress and signer
// Look up the remove tsHash for this custody address and signer
const removeTsHash = await ResultAsync.fromPromise(
this._db.get(SignerStore.signerRemovesKey(message.fid(), message.signer(), signer)),
() => undefined
@@ -202,7 +298,7 @@ class SignerStore {
if (removeTsHash.isOk()) {
if (
this.SignerMessageCompare(MessageType.SignerRemove, removeTsHash.value, message.type(), message.tsHash()) >= 0
this.signerMessageCompare(MessageType.SignerRemove, removeTsHash.value, message.type(), message.tsHash()) >= 0
) {
// If the existing remove has the same or higher order than the new message, no-op
return undefined;
@@ -226,7 +322,7 @@ class SignerStore {
);
if (addTsHash.isOk()) {
if (this.SignerMessageCompare(MessageType.SignerAdd, addTsHash.value, message.type(), message.tsHash()) >= 0) {
if (this.signerMessageCompare(MessageType.SignerAdd, addTsHash.value, message.type(), message.tsHash()) >= 0) {
// If the existing add has the same or higher order than the new message, no-op
return undefined;
} else {
@@ -245,6 +341,7 @@ class SignerStore {
return txn;
}
/* Builds a RocksDB transaction to insert a SignerAdd message and construct its indices */
private putSignerAddTransaction(txn: Transaction, message: SignerAddModel): Transaction {
// Put message and index by signer
txn = MessageModel.putTransaction(txn, message);
@@ -258,6 +355,7 @@ class SignerStore {
return txn;
}
/* Builds a RocksDB transaction to remove a SignerAdd message and delete its indices */
private deleteSignerAddTransaction(txn: Transaction, message: SignerAddModel): Transaction {
// Delete from signerAdds
txn = txn.del(
@@ -268,6 +366,7 @@ class SignerStore {
return MessageModel.deleteTransaction(txn, message);
}
/* Builds a RocksDB transaction to insert a SignerRemove message and construct its indices */
private putSignerRemoveTransaction(txn: Transaction, message: SignerRemoveModel): Transaction {
// Put message and index by signer
txn = MessageModel.putTransaction(txn, message);
@@ -281,6 +380,7 @@ class SignerStore {
return txn;
}
/* Builds a RocksDB transaction to remove a SignerRemove message and delete its indices */
private deleteSignerRemoveTransaction(txn: Transaction, message: SignerRemoveModel): Transaction {
// Delete from signerRemoves
txn = txn.del(

View File

@@ -385,7 +385,7 @@ const MessageFactory = Factory.define<MessageT, { signer?: KeyPair; wallet?: Wal
}
);
const IDRegistryEventFactory = Factory.define<ContractEventT, any, ContractEvent>(({ onCreate }) => {
const IdRegistryEventFactory = Factory.define<ContractEventT, any, ContractEvent>(({ onCreate }) => {
onCreate((params) => {
const builder = new Builder(1);
builder.finish(params.pack(builder));
@@ -399,7 +399,7 @@ const IDRegistryEventFactory = Factory.define<ContractEventT, any, ContractEvent
faker.datatype.number({ max: 1000 }),
Array.from(FIDFactory.build()),
Array.from(arrayify(faker.datatype.hexadecimal({ length: 40 }))),
ContractEventType.IDRegistryRegister
ContractEventType.IdRegistryRegister
);
});
@@ -430,7 +430,7 @@ const Factories = {
UserDataBody: UserDataBodyFactory,
UserDataAddData: UserDataAddDataFactory,
Message: MessageFactory,
IDRegistryEvent: IDRegistryEventFactory,
IdRegistryEvent: IdRegistryEventFactory,
};
export default Factories;

View File

@@ -1,6 +1,6 @@
// automatically generated by the FlatBuffers compiler, do not modify
export enum ContractEventType {
IDRegistryRegister = 1,
IDRegistryTransfer = 2
IdRegistryRegister = 1,
IdRegistryTransfer = 2,
}

View File

@@ -4,257 +4,288 @@ import * as flatbuffers from 'flatbuffers';
import { ContractEventType } from '../farcaster/contract-event-type';
export class ContractEvent {
bb: flatbuffers.ByteBuffer|null = null;
bb: flatbuffers.ByteBuffer | null = null;
bb_pos = 0;
__init(i:number, bb:flatbuffers.ByteBuffer):ContractEvent {
this.bb_pos = i;
this.bb = bb;
return this;
}
static getRootAsContractEvent(bb:flatbuffers.ByteBuffer, obj?:ContractEvent):ContractEvent {
return (obj || new ContractEvent()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
}
static getSizePrefixedRootAsContractEvent(bb:flatbuffers.ByteBuffer, obj?:ContractEvent):ContractEvent {
bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
return (obj || new ContractEvent()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
}
blockNumber():number {
const offset = this.bb!.__offset(this.bb_pos, 4);
return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;
}
blockHash(index: number):number|null {
const offset = this.bb!.__offset(this.bb_pos, 6);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
blockHashLength():number {
const offset = this.bb!.__offset(this.bb_pos, 6);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
blockHashArray():Uint8Array|null {
const offset = this.bb!.__offset(this.bb_pos, 6);
return offset ? new Uint8Array(this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset), this.bb!.__vector_len(this.bb_pos + offset)) : null;
}
transactionHash(index: number):number|null {
const offset = this.bb!.__offset(this.bb_pos, 8);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
transactionHashLength():number {
const offset = this.bb!.__offset(this.bb_pos, 8);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
transactionHashArray():Uint8Array|null {
const offset = this.bb!.__offset(this.bb_pos, 8);
return offset ? new Uint8Array(this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset), this.bb!.__vector_len(this.bb_pos + offset)) : null;
}
logIndex():number {
const offset = this.bb!.__offset(this.bb_pos, 10);
return offset ? this.bb!.readUint16(this.bb_pos + offset) : 0;
}
fid(index: number):number|null {
const offset = this.bb!.__offset(this.bb_pos, 12);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
fidLength():number {
const offset = this.bb!.__offset(this.bb_pos, 12);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
fidArray():Uint8Array|null {
const offset = this.bb!.__offset(this.bb_pos, 12);
return offset ? new Uint8Array(this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset), this.bb!.__vector_len(this.bb_pos + offset)) : null;
}
to(index: number):number|null {
const offset = this.bb!.__offset(this.bb_pos, 14);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
toLength():number {
const offset = this.bb!.__offset(this.bb_pos, 14);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
toArray():Uint8Array|null {
const offset = this.bb!.__offset(this.bb_pos, 14);
return offset ? new Uint8Array(this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset), this.bb!.__vector_len(this.bb_pos + offset)) : null;
}
type():ContractEventType {
const offset = this.bb!.__offset(this.bb_pos, 16);
return offset ? this.bb!.readUint8(this.bb_pos + offset) : ContractEventType.IDRegistryRegister;
}
static startContractEvent(builder:flatbuffers.Builder) {
builder.startObject(7);
}
static addBlockNumber(builder:flatbuffers.Builder, blockNumber:number) {
builder.addFieldInt32(0, blockNumber, 0);
}
static addBlockHash(builder:flatbuffers.Builder, blockHashOffset:flatbuffers.Offset) {
builder.addFieldOffset(1, blockHashOffset, 0);
}
static createBlockHashVector(builder:flatbuffers.Builder, data:number[]|Uint8Array):flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
__init(i: number, bb: flatbuffers.ByteBuffer): ContractEvent {
this.bb_pos = i;
this.bb = bb;
return this;
}
return builder.endVector();
}
static startBlockHashVector(builder:flatbuffers.Builder, numElems:number) {
builder.startVector(1, numElems, 1);
}
static addTransactionHash(builder:flatbuffers.Builder, transactionHashOffset:flatbuffers.Offset) {
builder.addFieldOffset(2, transactionHashOffset, 0);
}
static createTransactionHashVector(builder:flatbuffers.Builder, data:number[]|Uint8Array):flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
static getRootAsContractEvent(bb: flatbuffers.ByteBuffer, obj?: ContractEvent): ContractEvent {
return (obj || new ContractEvent()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
}
return builder.endVector();
}
static startTransactionHashVector(builder:flatbuffers.Builder, numElems:number) {
builder.startVector(1, numElems, 1);
}
static addLogIndex(builder:flatbuffers.Builder, logIndex:number) {
builder.addFieldInt16(3, logIndex, 0);
}
static addFid(builder:flatbuffers.Builder, fidOffset:flatbuffers.Offset) {
builder.addFieldOffset(4, fidOffset, 0);
}
static createFidVector(builder:flatbuffers.Builder, data:number[]|Uint8Array):flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
static getSizePrefixedRootAsContractEvent(bb: flatbuffers.ByteBuffer, obj?: ContractEvent): ContractEvent {
bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
return (obj || new ContractEvent()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
}
return builder.endVector();
}
static startFidVector(builder:flatbuffers.Builder, numElems:number) {
builder.startVector(1, numElems, 1);
}
static addTo(builder:flatbuffers.Builder, toOffset:flatbuffers.Offset) {
builder.addFieldOffset(5, toOffset, 0);
}
static createToVector(builder:flatbuffers.Builder, data:number[]|Uint8Array):flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
blockNumber(): number {
const offset = this.bb!.__offset(this.bb_pos, 4);
return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;
}
return builder.endVector();
}
static startToVector(builder:flatbuffers.Builder, numElems:number) {
builder.startVector(1, numElems, 1);
}
blockHash(index: number): number | null {
const offset = this.bb!.__offset(this.bb_pos, 6);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
static addType(builder:flatbuffers.Builder, type:ContractEventType) {
builder.addFieldInt8(6, type, ContractEventType.IDRegistryRegister);
}
blockHashLength(): number {
const offset = this.bb!.__offset(this.bb_pos, 6);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
static endContractEvent(builder:flatbuffers.Builder):flatbuffers.Offset {
const offset = builder.endObject();
builder.requiredField(offset, 6) // block_hash
builder.requiredField(offset, 8) // transaction_hash
return offset;
}
blockHashArray(): Uint8Array | null {
const offset = this.bb!.__offset(this.bb_pos, 6);
return offset
? new Uint8Array(
this.bb!.bytes().buffer,
this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),
this.bb!.__vector_len(this.bb_pos + offset)
)
: null;
}
static finishContractEventBuffer(builder:flatbuffers.Builder, offset:flatbuffers.Offset) {
builder.finish(offset);
}
transactionHash(index: number): number | null {
const offset = this.bb!.__offset(this.bb_pos, 8);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
static finishSizePrefixedContractEventBuffer(builder:flatbuffers.Builder, offset:flatbuffers.Offset) {
builder.finish(offset, undefined, true);
}
transactionHashLength(): number {
const offset = this.bb!.__offset(this.bb_pos, 8);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
static createContractEvent(builder:flatbuffers.Builder, blockNumber:number, blockHashOffset:flatbuffers.Offset, transactionHashOffset:flatbuffers.Offset, logIndex:number, fidOffset:flatbuffers.Offset, toOffset:flatbuffers.Offset, type:ContractEventType):flatbuffers.Offset {
ContractEvent.startContractEvent(builder);
ContractEvent.addBlockNumber(builder, blockNumber);
ContractEvent.addBlockHash(builder, blockHashOffset);
ContractEvent.addTransactionHash(builder, transactionHashOffset);
ContractEvent.addLogIndex(builder, logIndex);
ContractEvent.addFid(builder, fidOffset);
ContractEvent.addTo(builder, toOffset);
ContractEvent.addType(builder, type);
return ContractEvent.endContractEvent(builder);
}
transactionHashArray(): Uint8Array | null {
const offset = this.bb!.__offset(this.bb_pos, 8);
return offset
? new Uint8Array(
this.bb!.bytes().buffer,
this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),
this.bb!.__vector_len(this.bb_pos + offset)
)
: null;
}
unpack(): ContractEventT {
return new ContractEventT(
this.blockNumber(),
this.bb!.createScalarList(this.blockHash.bind(this), this.blockHashLength()),
this.bb!.createScalarList(this.transactionHash.bind(this), this.transactionHashLength()),
this.logIndex(),
this.bb!.createScalarList(this.fid.bind(this), this.fidLength()),
this.bb!.createScalarList(this.to.bind(this), this.toLength()),
this.type()
);
}
logIndex(): number {
const offset = this.bb!.__offset(this.bb_pos, 10);
return offset ? this.bb!.readUint16(this.bb_pos + offset) : 0;
}
fid(index: number): number | null {
const offset = this.bb!.__offset(this.bb_pos, 12);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
unpackTo(_o: ContractEventT): void {
_o.blockNumber = this.blockNumber();
_o.blockHash = this.bb!.createScalarList(this.blockHash.bind(this), this.blockHashLength());
_o.transactionHash = this.bb!.createScalarList(this.transactionHash.bind(this), this.transactionHashLength());
_o.logIndex = this.logIndex();
_o.fid = this.bb!.createScalarList(this.fid.bind(this), this.fidLength());
_o.to = this.bb!.createScalarList(this.to.bind(this), this.toLength());
_o.type = this.type();
}
fidLength(): number {
const offset = this.bb!.__offset(this.bb_pos, 12);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
fidArray(): Uint8Array | null {
const offset = this.bb!.__offset(this.bb_pos, 12);
return offset
? new Uint8Array(
this.bb!.bytes().buffer,
this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),
this.bb!.__vector_len(this.bb_pos + offset)
)
: null;
}
to(index: number): number | null {
const offset = this.bb!.__offset(this.bb_pos, 14);
return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;
}
toLength(): number {
const offset = this.bb!.__offset(this.bb_pos, 14);
return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
}
toArray(): Uint8Array | null {
const offset = this.bb!.__offset(this.bb_pos, 14);
return offset
? new Uint8Array(
this.bb!.bytes().buffer,
this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),
this.bb!.__vector_len(this.bb_pos + offset)
)
: null;
}
type(): ContractEventType {
const offset = this.bb!.__offset(this.bb_pos, 16);
return offset ? this.bb!.readUint8(this.bb_pos + offset) : ContractEventType.IdRegistryRegister;
}
static startContractEvent(builder: flatbuffers.Builder) {
builder.startObject(7);
}
static addBlockNumber(builder: flatbuffers.Builder, blockNumber: number) {
builder.addFieldInt32(0, blockNumber, 0);
}
static addBlockHash(builder: flatbuffers.Builder, blockHashOffset: flatbuffers.Offset) {
builder.addFieldOffset(1, blockHashOffset, 0);
}
static createBlockHashVector(builder: flatbuffers.Builder, data: number[] | Uint8Array): flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
}
return builder.endVector();
}
static startBlockHashVector(builder: flatbuffers.Builder, numElems: number) {
builder.startVector(1, numElems, 1);
}
static addTransactionHash(builder: flatbuffers.Builder, transactionHashOffset: flatbuffers.Offset) {
builder.addFieldOffset(2, transactionHashOffset, 0);
}
static createTransactionHashVector(builder: flatbuffers.Builder, data: number[] | Uint8Array): flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
}
return builder.endVector();
}
static startTransactionHashVector(builder: flatbuffers.Builder, numElems: number) {
builder.startVector(1, numElems, 1);
}
static addLogIndex(builder: flatbuffers.Builder, logIndex: number) {
builder.addFieldInt16(3, logIndex, 0);
}
static addFid(builder: flatbuffers.Builder, fidOffset: flatbuffers.Offset) {
builder.addFieldOffset(4, fidOffset, 0);
}
static createFidVector(builder: flatbuffers.Builder, data: number[] | Uint8Array): flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
}
return builder.endVector();
}
static startFidVector(builder: flatbuffers.Builder, numElems: number) {
builder.startVector(1, numElems, 1);
}
static addTo(builder: flatbuffers.Builder, toOffset: flatbuffers.Offset) {
builder.addFieldOffset(5, toOffset, 0);
}
static createToVector(builder: flatbuffers.Builder, data: number[] | Uint8Array): flatbuffers.Offset {
builder.startVector(1, data.length, 1);
for (let i = data.length - 1; i >= 0; i--) {
builder.addInt8(data[i]!);
}
return builder.endVector();
}
static startToVector(builder: flatbuffers.Builder, numElems: number) {
builder.startVector(1, numElems, 1);
}
static addType(builder: flatbuffers.Builder, type: ContractEventType) {
builder.addFieldInt8(6, type, ContractEventType.IdRegistryRegister);
}
static endContractEvent(builder: flatbuffers.Builder): flatbuffers.Offset {
const offset = builder.endObject();
builder.requiredField(offset, 6); // block_hash
builder.requiredField(offset, 8); // transaction_hash
return offset;
}
static finishContractEventBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
builder.finish(offset);
}
static finishSizePrefixedContractEventBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
builder.finish(offset, undefined, true);
}
static createContractEvent(
builder: flatbuffers.Builder,
blockNumber: number,
blockHashOffset: flatbuffers.Offset,
transactionHashOffset: flatbuffers.Offset,
logIndex: number,
fidOffset: flatbuffers.Offset,
toOffset: flatbuffers.Offset,
type: ContractEventType
): flatbuffers.Offset {
ContractEvent.startContractEvent(builder);
ContractEvent.addBlockNumber(builder, blockNumber);
ContractEvent.addBlockHash(builder, blockHashOffset);
ContractEvent.addTransactionHash(builder, transactionHashOffset);
ContractEvent.addLogIndex(builder, logIndex);
ContractEvent.addFid(builder, fidOffset);
ContractEvent.addTo(builder, toOffset);
ContractEvent.addType(builder, type);
return ContractEvent.endContractEvent(builder);
}
unpack(): ContractEventT {
return new ContractEventT(
this.blockNumber(),
this.bb!.createScalarList(this.blockHash.bind(this), this.blockHashLength()),
this.bb!.createScalarList(this.transactionHash.bind(this), this.transactionHashLength()),
this.logIndex(),
this.bb!.createScalarList(this.fid.bind(this), this.fidLength()),
this.bb!.createScalarList(this.to.bind(this), this.toLength()),
this.type()
);
}
unpackTo(_o: ContractEventT): void {
_o.blockNumber = this.blockNumber();
_o.blockHash = this.bb!.createScalarList(this.blockHash.bind(this), this.blockHashLength());
_o.transactionHash = this.bb!.createScalarList(this.transactionHash.bind(this), this.transactionHashLength());
_o.logIndex = this.logIndex();
_o.fid = this.bb!.createScalarList(this.fid.bind(this), this.fidLength());
_o.to = this.bb!.createScalarList(this.to.bind(this), this.toLength());
_o.type = this.type();
}
}
export class ContractEventT {
constructor(
public blockNumber: number = 0,
public blockHash: (number)[] = [],
public transactionHash: (number)[] = [],
public logIndex: number = 0,
public fid: (number)[] = [],
public to: (number)[] = [],
public type: ContractEventType = ContractEventType.IDRegistryRegister
){}
constructor(
public blockNumber: number = 0,
public blockHash: number[] = [],
public transactionHash: number[] = [],
public logIndex: number = 0,
public fid: number[] = [],
public to: number[] = [],
public type: ContractEventType = ContractEventType.IdRegistryRegister
) {}
pack(builder: flatbuffers.Builder): flatbuffers.Offset {
const blockHash = ContractEvent.createBlockHashVector(builder, this.blockHash);
const transactionHash = ContractEvent.createTransactionHashVector(builder, this.transactionHash);
const fid = ContractEvent.createFidVector(builder, this.fid);
const to = ContractEvent.createToVector(builder, this.to);
pack(builder:flatbuffers.Builder): flatbuffers.Offset {
const blockHash = ContractEvent.createBlockHashVector(builder, this.blockHash);
const transactionHash = ContractEvent.createTransactionHashVector(builder, this.transactionHash);
const fid = ContractEvent.createFidVector(builder, this.fid);
const to = ContractEvent.createToVector(builder, this.to);
return ContractEvent.createContractEvent(builder,
this.blockNumber,
blockHash,
transactionHash,
this.logIndex,
fid,
to,
this.type
);
}
return ContractEvent.createContractEvent(
builder,
this.blockNumber,
blockHash,
transactionHash,
this.logIndex,
fid,
to,
this.type
);
}
}

View File

@@ -3,10 +3,13 @@
namespace Farcaster;
enum ContractEventType: uint8 {
IDRegistryRegister = 1,
IDRegistryTransfer = 2,
IdRegistryRegister = 1,
IdRegistryTransfer = 2,
}
/**
* Contains block, transaction and log data needed to parse events from the IdRegistry contract.
*/
table ContractEvent {
block_number: uint32;
block_hash: [ubyte] (required);