chore: switch to generating client libraries off snapchain protos (#2603)

## Why is this change needed?

This eliminates the need to keep two sets of protos in sync with one
another. The generated files should be backwards compatible because the
protos have been kept in sync so far.

Some material updates:
- Snapchain doesn't have the bulk streaming rpcs we used for
reconciliation in hubs, so all of the shuttle code that depends on
those rpcs has been deleted.
- In the Snapchain protos, the admin rpcs are specified in a separate
proto file, which caused a small amount of import churn (see the client
sketch after this list).
- The admin rpcs in Snapchain do not mirror the admin rpcs on hubs.
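
For consumers who import the generated service clients directly, the
split roughly looks like the sketch below. This is a minimal sketch, not
the full API surface: the address and fid are illustrative, and it
assumes the generated module layout shown in this diff
(`./generated/rpc` and `./generated/admin_rpc`).

```ts
import { ChannelCredentials } from "@grpc/grpc-js";
// HubServiceClient still comes from ./generated/rpc.
// AdminServiceClient was previously exported from ./generated/rpc as well.
import { AdminServiceClient, RetryOnchainEventsRequest } from "./generated/admin_rpc";

// Illustrative address; hubs typically expose gRPC on port 2283.
const admin = new AdminServiceClient("localhost:2283", ChannelCredentials.createInsecure());

// Retry on-chain events for a single fid (fid value is illustrative).
admin.retryOnchainEvents(RetryOnchainEventsRequest.create({ fid: 1 }), (err, _res) => {
  if (err) console.error("retryOnchainEvents failed", err);
});
```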

## Merge Checklist

_Choose all relevant options below by adding an `x` now or at any time
before submitting for review_

- [x] PR title adheres to the [conventional
commits](https://www.conventionalcommits.org/en/v1.0.0/) standard
- [x] PR has a
[changeset](https://github.com/farcasterxyz/hub-monorepo/blob/main/CONTRIBUTING.md#35-adding-changesets)
- [x] PR has been tagged with a change label(s) (i.e. documentation,
feature, bugfix, or chore)
- [ ] PR includes
[documentation](https://github.com/farcasterxyz/hub-monorepo/blob/main/CONTRIBUTING.md#32-writing-docs)
if necessary.

<!-- start pr-codex -->

---

## PR-Codex overview
This PR focuses on removing the outdated `.proto` files and switching
client library generation to the new `snapchain` protobufs. It also
introduces type guards for the `BlockConfirmed` event and modifies
various message structures in the codebase.

### Detailed summary
- Deleted outdated `.proto` files in `protobufs/schemas`.
- Updated `package.json` scripts to generate clients from `snapchain`
instead of local schemas.
- Added type guards for `BlockConfirmed` events in `typeguards.ts` (see
the usage sketch after this list).
- Modified message structures to include `BlockConfirmedBody`.
- Updated comments for clarity in `message.ts`.
- Refactored `MessageReconciliation` class to remove streaming logic.
- Introduced new methods for handling on-chain events in `admin_rpc.ts`.
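
A minimal sketch of how the new `BlockConfirmed` type guard might be
consumed, assuming `HubEvent` and `isBlockConfirmedHubEvent` are
exported from `@farcaster/core` alongside the existing hub event type
guards (the field names match `BlockConfirmedBody` in this diff):

```ts
import { HubEvent, isBlockConfirmedHubEvent } from "@farcaster/core";

function handleHubEvent(event: HubEvent): void {
  // The guard checks both the event type and that blockConfirmedBody is set.
  if (isBlockConfirmedHubEvent(event) && event.blockConfirmedBody) {
    const { blockNumber, shardIndex, totalEvents } = event.blockConfirmedBody;
    console.log(`shard ${shardIndex}: block ${blockNumber} confirmed (${totalEvents} events)`);
  }
}
```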

> The following files were skipped due to too many changes:
> `packages/hub-web/src/generated/hub_event.ts`,
> `packages/hub-nodejs/src/generated/hub_event.ts`,
> `packages/core/src/protobufs/generated/hub_event.ts`,
> `packages/hub-web/src/generated/rpc.ts`,
> `packages/core/src/protobufs/generated/gossip.ts`,
> `packages/hub-nodejs/src/generated/rpc.ts`,
> `packages/hub-web/src/generated/blocks.ts`,
> `packages/hub-nodejs/src/generated/blocks.ts`,
> `packages/core/src/protobufs/generated/blocks.ts`,
> `packages/hub-web/src/generated/request_response.ts`,
> `packages/hub-nodejs/src/generated/request_response.ts`,
> `packages/core/src/protobufs/generated/request_response.ts`

>  Ask PR-Codex anything about this PR by commenting with `/codex {your
question}`

<!-- end pr-codex -->
Commit 56cf1302df (parent d747f38e36) by Aditi Srinivasan,
2025-06-23 18:13:14 -04:00, committed by GitHub.
42 changed files with 14397 additions and 8936 deletions

View File

@@ -0,0 +1,8 @@
---
"@farcaster/hub-nodejs": minor
"@farcaster/hub-web": minor
"@farcaster/shuttle": minor
"@farcaster/core": minor
---
chore: switch to generating client libraries off snapchain protos

View File

@@ -0,0 +1,8 @@
---
"@farcaster/hub-nodejs": patch
"@farcaster/hub-web": patch
"@farcaster/shuttle": patch
"@farcaster/core": patch
---
chore: add typeguards for BlockConfirmed event

generate-protos.sh (new executable file, 32 lines)
View File

@@ -0,0 +1,32 @@
PROTO_REPO=https://github.com/farcasterxyz/snapchain
PROTO_PATH=src/proto
PROTO_REV=f82a7e559711deac60b819c4d92bad1aaca55946 # Update this if you want to generate off updated snapchain protos
TMPDIR=tmp-protogen
git clone $PROTO_REPO $TMPDIR
cd $TMPDIR
git checkout $PROTO_REV
cd ..
# Determine which files you care about
if [[ "$LIBRARY" == "core" ]]; then
RELEVANT_PROTOS=$(ls $TMPDIR/$PROTO_PATH/*.proto | xargs -n 1 basename | xargs -I{} echo '/defs/{}' | tr '\n' ' ')
OUT_PATH=src/protobufs/generated
CUSTOM_TS_PROTO_OPTS="outputServices=false"
elif [[ "$LIBRARY" == "hub-nodejs" ]]; then
RELEVANT_PROTOS="/defs/rpc.proto /defs/admin_rpc.proto"
OUT_PATH=src/generated
CUSTOM_TS_PROTO_OPTS="outputServices=grpc-js"
elif [[ "$LIBRARY" == "hub-web" ]]; then
RELEVANT_PROTOS="/defs/rpc.proto /defs/admin_rpc.proto"
OUT_PATH=src/generated
CUSTOM_TS_PROTO_OPTS="outputClientImpl=grpc-web,lowerCaseServiceMethods=true"
fi
echo "Generating relevant protos: $RELEVANT_PROTOS"
echo "Outputting generated files to: $OUT_PATH"
docker run --rm --user $(id -u):$(id -g) -v $(pwd)/../../node_modules:/node_modules -v $(pwd)/$TMPDIR/$PROTO_PATH:/defs -v $(pwd)/$OUT_PATH:/out namely/protoc:1.50_1 --plugin=/node_modules/ts-proto/protoc-gen-ts_proto --ts_proto_out=/out --ts_proto_opt=esModuleInterop=true,exportCommonSymbols=false,useOptionals=none,unrecognizedEnum=false,removeEnumPrefix=true,$CUSTOM_TS_PROTO_OPTS --proto_path=/defs $RELEVANT_PROTOS
rm -rf $TMPDIR

View File

@@ -30,7 +30,7 @@
"scripts": {
"build": "tsup --config tsup.config.ts",
"clean": "rimraf ./dist",
"protoc": "docker run --rm --user $(id -u):$(id -g) -v $(pwd)/../../node_modules:/node_modules -v $(pwd)/../../protobufs/schemas:/defs -v $(pwd)/src/protobufs/generated:/out namely/protoc:1.50_1 --plugin=/node_modules/ts-proto/protoc-gen-ts_proto --ts_proto_out=/out --ts_proto_opt=esModuleInterop=true,exportCommonSymbols=false,outputServices=false,useOptionals=none,unrecognizedEnum=false,removeEnumPrefix=true --proto_path=/defs $(ls ../../protobufs/schemas/*.proto | xargs -n 1 basename | xargs -I{} echo '/defs/{}' | tr '\n' ' ')",
"protoc": "LIBRARY=core ../../generate-protos.sh",
"lint": "biome format src/ --write && biome check src/ --apply",
"lint:ci": "biome ci src/",
"test": "NODE_OPTIONS=--experimental-vm-modules jest",

View File

@@ -0,0 +1,326 @@
/* eslint-disable */
import Long from "long";
import _m0 from "protobufjs/minimal";
export interface Empty {
}
export interface RetryBlockNumberRange {
startBlockNumber: number;
stopBlockNumber: number;
}
export interface RetryOnchainEventsRequest {
fid?: number | undefined;
blockRange?: RetryBlockNumberRange | undefined;
}
export interface UploadSnapshotRequest {
shardIndexes: number[];
}
function createBaseEmpty(): Empty {
return {};
}
export const Empty = {
encode(_: Empty, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): Empty {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseEmpty();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(_: any): Empty {
return {};
},
toJSON(_: Empty): unknown {
const obj: any = {};
return obj;
},
create<I extends Exact<DeepPartial<Empty>, I>>(base?: I): Empty {
return Empty.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<Empty>, I>>(_: I): Empty {
const message = createBaseEmpty();
return message;
},
};
function createBaseRetryBlockNumberRange(): RetryBlockNumberRange {
return { startBlockNumber: 0, stopBlockNumber: 0 };
}
export const RetryBlockNumberRange = {
encode(message: RetryBlockNumberRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.startBlockNumber !== 0) {
writer.uint32(8).uint64(message.startBlockNumber);
}
if (message.stopBlockNumber !== 0) {
writer.uint32(16).uint64(message.stopBlockNumber);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): RetryBlockNumberRange {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseRetryBlockNumberRange();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.startBlockNumber = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 16) {
break;
}
message.stopBlockNumber = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): RetryBlockNumberRange {
return {
startBlockNumber: isSet(object.startBlockNumber) ? Number(object.startBlockNumber) : 0,
stopBlockNumber: isSet(object.stopBlockNumber) ? Number(object.stopBlockNumber) : 0,
};
},
toJSON(message: RetryBlockNumberRange): unknown {
const obj: any = {};
message.startBlockNumber !== undefined && (obj.startBlockNumber = Math.round(message.startBlockNumber));
message.stopBlockNumber !== undefined && (obj.stopBlockNumber = Math.round(message.stopBlockNumber));
return obj;
},
create<I extends Exact<DeepPartial<RetryBlockNumberRange>, I>>(base?: I): RetryBlockNumberRange {
return RetryBlockNumberRange.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<RetryBlockNumberRange>, I>>(object: I): RetryBlockNumberRange {
const message = createBaseRetryBlockNumberRange();
message.startBlockNumber = object.startBlockNumber ?? 0;
message.stopBlockNumber = object.stopBlockNumber ?? 0;
return message;
},
};
function createBaseRetryOnchainEventsRequest(): RetryOnchainEventsRequest {
return { fid: undefined, blockRange: undefined };
}
export const RetryOnchainEventsRequest = {
encode(message: RetryOnchainEventsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.fid !== undefined) {
writer.uint32(8).uint64(message.fid);
}
if (message.blockRange !== undefined) {
RetryBlockNumberRange.encode(message.blockRange, writer.uint32(18).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): RetryOnchainEventsRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseRetryOnchainEventsRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.fid = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 18) {
break;
}
message.blockRange = RetryBlockNumberRange.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): RetryOnchainEventsRequest {
return {
fid: isSet(object.fid) ? Number(object.fid) : undefined,
blockRange: isSet(object.blockRange) ? RetryBlockNumberRange.fromJSON(object.blockRange) : undefined,
};
},
toJSON(message: RetryOnchainEventsRequest): unknown {
const obj: any = {};
message.fid !== undefined && (obj.fid = Math.round(message.fid));
message.blockRange !== undefined &&
(obj.blockRange = message.blockRange ? RetryBlockNumberRange.toJSON(message.blockRange) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<RetryOnchainEventsRequest>, I>>(base?: I): RetryOnchainEventsRequest {
return RetryOnchainEventsRequest.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<RetryOnchainEventsRequest>, I>>(object: I): RetryOnchainEventsRequest {
const message = createBaseRetryOnchainEventsRequest();
message.fid = object.fid ?? undefined;
message.blockRange = (object.blockRange !== undefined && object.blockRange !== null)
? RetryBlockNumberRange.fromPartial(object.blockRange)
: undefined;
return message;
},
};
function createBaseUploadSnapshotRequest(): UploadSnapshotRequest {
return { shardIndexes: [] };
}
export const UploadSnapshotRequest = {
encode(message: UploadSnapshotRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
writer.uint32(10).fork();
for (const v of message.shardIndexes) {
writer.uint32(v);
}
writer.ldelim();
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): UploadSnapshotRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseUploadSnapshotRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag == 8) {
message.shardIndexes.push(reader.uint32());
continue;
}
if (tag == 10) {
const end2 = reader.uint32() + reader.pos;
while (reader.pos < end2) {
message.shardIndexes.push(reader.uint32());
}
continue;
}
break;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): UploadSnapshotRequest {
return { shardIndexes: Array.isArray(object?.shardIndexes) ? object.shardIndexes.map((e: any) => Number(e)) : [] };
},
toJSON(message: UploadSnapshotRequest): unknown {
const obj: any = {};
if (message.shardIndexes) {
obj.shardIndexes = message.shardIndexes.map((e) => Math.round(e));
} else {
obj.shardIndexes = [];
}
return obj;
},
create<I extends Exact<DeepPartial<UploadSnapshotRequest>, I>>(base?: I): UploadSnapshotRequest {
return UploadSnapshotRequest.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<UploadSnapshotRequest>, I>>(object: I): UploadSnapshotRequest {
const message = createBaseUploadSnapshotRequest();
message.shardIndexes = object.shardIndexes?.map((e) => e) || [];
return message;
},
};
declare var self: any | undefined;
declare var window: any | undefined;
declare var global: any | undefined;
var tsProtoGlobalThis: any = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
if (typeof self !== "undefined") {
return self;
}
if (typeof window !== "undefined") {
return window;
}
if (typeof global !== "undefined") {
return global;
}
throw "Unable to locate global object";
})();
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T
: T extends Array<infer U> ? Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
: T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
: Partial<T>;
type KeysOfUnion<T> = T extends T ? keyof T : never;
type Exact<P, I extends P> = P extends Builtin ? P
: P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
function longToNumber(long: Long): number {
if (long.gt(Number.MAX_SAFE_INTEGER)) {
throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER");
}
return long.toNumber();
}
if (_m0.util.Long !== Long) {
_m0.util.Long = Long as any;
_m0.configure();
}
function isSet(value: any): boolean {
return value !== null && value !== undefined;
}

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@@ -23,6 +23,7 @@ export enum HubEventType {
*/
MERGE_ON_CHAIN_EVENT = 9,
MERGE_FAILURE = 10,
BLOCK_CONFIRMED = 11,
}
export function hubEventTypeFromJSON(object: any): HubEventType {
@@ -48,6 +49,9 @@ export function hubEventTypeFromJSON(object: any): HubEventType {
case 10:
case "HUB_EVENT_TYPE_MERGE_FAILURE":
return HubEventType.MERGE_FAILURE;
case 11:
case "HUB_EVENT_TYPE_BLOCK_CONFIRMED":
return HubEventType.BLOCK_CONFIRMED;
default:
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HubEventType");
}
@@ -69,6 +73,8 @@ export function hubEventTypeToJSON(object: HubEventType): string {
return "HUB_EVENT_TYPE_MERGE_ON_CHAIN_EVENT";
case HubEventType.MERGE_FAILURE:
return "HUB_EVENT_TYPE_MERGE_FAILURE";
case HubEventType.BLOCK_CONFIRMED:
return "HUB_EVENT_TYPE_BLOCK_CONFIRMED";
default:
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HubEventType");
}
@@ -79,20 +85,28 @@ export interface MergeMessageBody {
deletedMessages: Message[];
}
export interface PruneMessageBody {
message: Message | undefined;
}
export interface MergeFailureBody {
message: Message | undefined;
code: string;
reason: string;
}
export interface PruneMessageBody {
message: Message | undefined;
}
export interface RevokeMessageBody {
message: Message | undefined;
}
export interface BlockConfirmedBody {
blockNumber: number;
shardIndex: number;
timestamp: number;
blockHash: Uint8Array;
totalEvents: number;
}
export interface MergeOnChainEventBody {
onChainEvent: OnChainEvent | undefined;
}
@@ -127,6 +141,7 @@ export interface HubEvent {
*/
mergeOnChainEventBody?: MergeOnChainEventBody | undefined;
mergeFailure?: MergeFailureBody | undefined;
blockConfirmedBody?: BlockConfirmedBody | undefined;
blockNumber: number;
shardIndex: number;
timestamp: number;
@@ -211,64 +226,6 @@ export const MergeMessageBody = {
},
};
function createBasePruneMessageBody(): PruneMessageBody {
return { message: undefined };
}
export const PruneMessageBody = {
encode(message: PruneMessageBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.message !== undefined) {
Message.encode(message.message, writer.uint32(10).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): PruneMessageBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBasePruneMessageBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 10) {
break;
}
message.message = Message.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): PruneMessageBody {
return { message: isSet(object.message) ? Message.fromJSON(object.message) : undefined };
},
toJSON(message: PruneMessageBody): unknown {
const obj: any = {};
message.message !== undefined && (obj.message = message.message ? Message.toJSON(message.message) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<PruneMessageBody>, I>>(base?: I): PruneMessageBody {
return PruneMessageBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<PruneMessageBody>, I>>(object: I): PruneMessageBody {
const message = createBasePruneMessageBody();
message.message = (object.message !== undefined && object.message !== null)
? Message.fromPartial(object.message)
: undefined;
return message;
},
};
function createBaseMergeFailureBody(): MergeFailureBody {
return { message: undefined, code: "", reason: "" };
}
@@ -355,6 +312,64 @@ export const MergeFailureBody = {
},
};
function createBasePruneMessageBody(): PruneMessageBody {
return { message: undefined };
}
export const PruneMessageBody = {
encode(message: PruneMessageBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.message !== undefined) {
Message.encode(message.message, writer.uint32(10).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): PruneMessageBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBasePruneMessageBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 10) {
break;
}
message.message = Message.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): PruneMessageBody {
return { message: isSet(object.message) ? Message.fromJSON(object.message) : undefined };
},
toJSON(message: PruneMessageBody): unknown {
const obj: any = {};
message.message !== undefined && (obj.message = message.message ? Message.toJSON(message.message) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<PruneMessageBody>, I>>(base?: I): PruneMessageBody {
return PruneMessageBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<PruneMessageBody>, I>>(object: I): PruneMessageBody {
const message = createBasePruneMessageBody();
message.message = (object.message !== undefined && object.message !== null)
? Message.fromPartial(object.message)
: undefined;
return message;
},
};
function createBaseRevokeMessageBody(): RevokeMessageBody {
return { message: undefined };
}
@@ -413,6 +428,117 @@ export const RevokeMessageBody = {
},
};
function createBaseBlockConfirmedBody(): BlockConfirmedBody {
return { blockNumber: 0, shardIndex: 0, timestamp: 0, blockHash: new Uint8Array(), totalEvents: 0 };
}
export const BlockConfirmedBody = {
encode(message: BlockConfirmedBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.blockNumber !== 0) {
writer.uint32(8).uint64(message.blockNumber);
}
if (message.shardIndex !== 0) {
writer.uint32(16).uint32(message.shardIndex);
}
if (message.timestamp !== 0) {
writer.uint32(24).uint64(message.timestamp);
}
if (message.blockHash.length !== 0) {
writer.uint32(34).bytes(message.blockHash);
}
if (message.totalEvents !== 0) {
writer.uint32(40).uint64(message.totalEvents);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): BlockConfirmedBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseBlockConfirmedBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.blockNumber = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 16) {
break;
}
message.shardIndex = reader.uint32();
continue;
case 3:
if (tag != 24) {
break;
}
message.timestamp = longToNumber(reader.uint64() as Long);
continue;
case 4:
if (tag != 34) {
break;
}
message.blockHash = reader.bytes();
continue;
case 5:
if (tag != 40) {
break;
}
message.totalEvents = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): BlockConfirmedBody {
return {
blockNumber: isSet(object.blockNumber) ? Number(object.blockNumber) : 0,
shardIndex: isSet(object.shardIndex) ? Number(object.shardIndex) : 0,
timestamp: isSet(object.timestamp) ? Number(object.timestamp) : 0,
blockHash: isSet(object.blockHash) ? bytesFromBase64(object.blockHash) : new Uint8Array(),
totalEvents: isSet(object.totalEvents) ? Number(object.totalEvents) : 0,
};
},
toJSON(message: BlockConfirmedBody): unknown {
const obj: any = {};
message.blockNumber !== undefined && (obj.blockNumber = Math.round(message.blockNumber));
message.shardIndex !== undefined && (obj.shardIndex = Math.round(message.shardIndex));
message.timestamp !== undefined && (obj.timestamp = Math.round(message.timestamp));
message.blockHash !== undefined &&
(obj.blockHash = base64FromBytes(message.blockHash !== undefined ? message.blockHash : new Uint8Array()));
message.totalEvents !== undefined && (obj.totalEvents = Math.round(message.totalEvents));
return obj;
},
create<I extends Exact<DeepPartial<BlockConfirmedBody>, I>>(base?: I): BlockConfirmedBody {
return BlockConfirmedBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<BlockConfirmedBody>, I>>(object: I): BlockConfirmedBody {
const message = createBaseBlockConfirmedBody();
message.blockNumber = object.blockNumber ?? 0;
message.shardIndex = object.shardIndex ?? 0;
message.timestamp = object.timestamp ?? 0;
message.blockHash = object.blockHash ?? new Uint8Array();
message.totalEvents = object.totalEvents ?? 0;
return message;
},
};
function createBaseMergeOnChainEventBody(): MergeOnChainEventBody {
return { onChainEvent: undefined };
}
@@ -607,6 +733,7 @@ function createBaseHubEvent(): HubEvent {
mergeUsernameProofBody: undefined,
mergeOnChainEventBody: undefined,
mergeFailure: undefined,
blockConfirmedBody: undefined,
blockNumber: 0,
shardIndex: 0,
timestamp: 0,
@@ -639,6 +766,9 @@ export const HubEvent = {
if (message.mergeFailure !== undefined) {
MergeFailureBody.encode(message.mergeFailure, writer.uint32(106).fork()).ldelim();
}
if (message.blockConfirmedBody !== undefined) {
BlockConfirmedBody.encode(message.blockConfirmedBody, writer.uint32(130).fork()).ldelim();
}
if (message.blockNumber !== 0) {
writer.uint32(96).uint64(message.blockNumber);
}
@@ -714,6 +844,13 @@ export const HubEvent = {
message.mergeFailure = MergeFailureBody.decode(reader, reader.uint32());
continue;
case 16:
if (tag != 130) {
break;
}
message.blockConfirmedBody = BlockConfirmedBody.decode(reader, reader.uint32());
continue;
case 12:
if (tag != 96) {
break;
@@ -760,6 +897,9 @@ export const HubEvent = {
? MergeOnChainEventBody.fromJSON(object.mergeOnChainEventBody)
: undefined,
mergeFailure: isSet(object.mergeFailure) ? MergeFailureBody.fromJSON(object.mergeFailure) : undefined,
blockConfirmedBody: isSet(object.blockConfirmedBody)
? BlockConfirmedBody.fromJSON(object.blockConfirmedBody)
: undefined,
blockNumber: isSet(object.blockNumber) ? Number(object.blockNumber) : 0,
shardIndex: isSet(object.shardIndex) ? Number(object.shardIndex) : 0,
timestamp: isSet(object.timestamp) ? Number(object.timestamp) : 0,
@@ -785,6 +925,9 @@ export const HubEvent = {
: undefined);
message.mergeFailure !== undefined &&
(obj.mergeFailure = message.mergeFailure ? MergeFailureBody.toJSON(message.mergeFailure) : undefined);
message.blockConfirmedBody !== undefined && (obj.blockConfirmedBody = message.blockConfirmedBody
? BlockConfirmedBody.toJSON(message.blockConfirmedBody)
: undefined);
message.blockNumber !== undefined && (obj.blockNumber = Math.round(message.blockNumber));
message.shardIndex !== undefined && (obj.shardIndex = Math.round(message.shardIndex));
message.timestamp !== undefined && (obj.timestamp = Math.round(message.timestamp));
@@ -819,6 +962,9 @@ export const HubEvent = {
message.mergeFailure = (object.mergeFailure !== undefined && object.mergeFailure !== null)
? MergeFailureBody.fromPartial(object.mergeFailure)
: undefined;
message.blockConfirmedBody = (object.blockConfirmedBody !== undefined && object.blockConfirmedBody !== null)
? BlockConfirmedBody.fromPartial(object.blockConfirmedBody)
: undefined;
message.blockNumber = object.blockNumber ?? 0;
message.shardIndex = object.shardIndex ?? 0;
message.timestamp = object.timestamp ?? 0;
@@ -845,6 +991,31 @@ var tsProtoGlobalThis: any = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64: string): Uint8Array {
if (tsProtoGlobalThis.Buffer) {
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
} else {
const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
}
return arr;
}
}
function base64FromBytes(arr: Uint8Array): string {
if (tsProtoGlobalThis.Buffer) {
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
} else {
const bin: string[] = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
return tsProtoGlobalThis.btoa(bin.join(""));
}
}
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T

View File

@@ -244,7 +244,7 @@ export enum UserDataType {
USERNAME = 6,
/** LOCATION - Current location for the user */
LOCATION = 7,
/** TWITTER - Username of user on x */
/** TWITTER - Username of user on twitter */
TWITTER = 8,
/** GITHUB - Username of user on github */
GITHUB = 9,

View File

@@ -0,0 +1,169 @@
/* eslint-disable */
import Long from "long";
import _m0 from "protobufjs/minimal";
export interface OnChainEventState {
lastL2Block: number;
}
export interface FnameState {
lastFnameProof: number;
}
function createBaseOnChainEventState(): OnChainEventState {
return { lastL2Block: 0 };
}
export const OnChainEventState = {
encode(message: OnChainEventState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.lastL2Block !== 0) {
writer.uint32(24).uint64(message.lastL2Block);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): OnChainEventState {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseOnChainEventState();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 3:
if (tag != 24) {
break;
}
message.lastL2Block = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): OnChainEventState {
return { lastL2Block: isSet(object.lastL2Block) ? Number(object.lastL2Block) : 0 };
},
toJSON(message: OnChainEventState): unknown {
const obj: any = {};
message.lastL2Block !== undefined && (obj.lastL2Block = Math.round(message.lastL2Block));
return obj;
},
create<I extends Exact<DeepPartial<OnChainEventState>, I>>(base?: I): OnChainEventState {
return OnChainEventState.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<OnChainEventState>, I>>(object: I): OnChainEventState {
const message = createBaseOnChainEventState();
message.lastL2Block = object.lastL2Block ?? 0;
return message;
},
};
function createBaseFnameState(): FnameState {
return { lastFnameProof: 0 };
}
export const FnameState = {
encode(message: FnameState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.lastFnameProof !== 0) {
writer.uint32(24).uint64(message.lastFnameProof);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): FnameState {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseFnameState();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 3:
if (tag != 24) {
break;
}
message.lastFnameProof = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): FnameState {
return { lastFnameProof: isSet(object.lastFnameProof) ? Number(object.lastFnameProof) : 0 };
},
toJSON(message: FnameState): unknown {
const obj: any = {};
message.lastFnameProof !== undefined && (obj.lastFnameProof = Math.round(message.lastFnameProof));
return obj;
},
create<I extends Exact<DeepPartial<FnameState>, I>>(base?: I): FnameState {
return FnameState.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<FnameState>, I>>(object: I): FnameState {
const message = createBaseFnameState();
message.lastFnameProof = object.lastFnameProof ?? 0;
return message;
},
};
declare var self: any | undefined;
declare var window: any | undefined;
declare var global: any | undefined;
var tsProtoGlobalThis: any = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
if (typeof self !== "undefined") {
return self;
}
if (typeof window !== "undefined") {
return window;
}
if (typeof global !== "undefined") {
return global;
}
throw "Unable to locate global object";
})();
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T
: T extends Array<infer U> ? Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
: T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
: Partial<T>;
type KeysOfUnion<T> = T extends T ? keyof T : never;
type Exact<P, I extends P> = P extends Builtin ? P
: P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
function longToNumber(long: Long): number {
if (long.gt(Number.MAX_SAFE_INTEGER)) {
throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER");
}
return long.toNumber();
}
if (_m0.util.Long !== Long) {
_m0.util.Long = Long as any;
_m0.configure();
}
function isSet(value: any): boolean {
return value !== null && value !== undefined;
}

File diff suppressed because it is too large.

View File

@@ -5,11 +5,16 @@ export interface DbTrieNode {
key: Uint8Array;
childChars: number[];
items: number;
hash: Uint8Array;
childHashes: { [key: number]: Uint8Array };
}
export interface DbTrieNode_ChildHashesEntry {
key: number;
value: Uint8Array;
}
function createBaseDbTrieNode(): DbTrieNode {
return { key: new Uint8Array(), childChars: [], items: 0, hash: new Uint8Array() };
return { key: new Uint8Array(), childChars: [], items: 0, childHashes: {} };
}
export const DbTrieNode = {
@@ -25,9 +30,9 @@ export const DbTrieNode = {
if (message.items !== 0) {
writer.uint32(24).uint32(message.items);
}
if (message.hash.length !== 0) {
writer.uint32(34).bytes(message.hash);
}
Object.entries(message.childHashes).forEach(([key, value]) => {
DbTrieNode_ChildHashesEntry.encode({ key: key as any, value }, writer.uint32(34).fork()).ldelim();
});
return writer;
},
@@ -73,7 +78,10 @@ export const DbTrieNode = {
break;
}
message.hash = reader.bytes();
const entry4 = DbTrieNode_ChildHashesEntry.decode(reader, reader.uint32());
if (entry4.value !== undefined) {
message.childHashes[entry4.key] = entry4.value;
}
continue;
}
if ((tag & 7) == 4 || tag == 0) {
@@ -89,7 +97,12 @@ export const DbTrieNode = {
key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(),
childChars: Array.isArray(object?.childChars) ? object.childChars.map((e: any) => Number(e)) : [],
items: isSet(object.items) ? Number(object.items) : 0,
hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(),
childHashes: isObject(object.childHashes)
? Object.entries(object.childHashes).reduce<{ [key: number]: Uint8Array }>((acc, [key, value]) => {
acc[Number(key)] = bytesFromBase64(value as string);
return acc;
}, {})
: {},
};
},
@@ -103,8 +116,12 @@ export const DbTrieNode = {
obj.childChars = [];
}
message.items !== undefined && (obj.items = Math.round(message.items));
message.hash !== undefined &&
(obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array()));
obj.childHashes = {};
if (message.childHashes) {
Object.entries(message.childHashes).forEach(([k, v]) => {
obj.childHashes[k] = base64FromBytes(v);
});
}
return obj;
},
@@ -117,7 +134,87 @@ export const DbTrieNode = {
message.key = object.key ?? new Uint8Array();
message.childChars = object.childChars?.map((e) => e) || [];
message.items = object.items ?? 0;
message.hash = object.hash ?? new Uint8Array();
message.childHashes = Object.entries(object.childHashes ?? {}).reduce<{ [key: number]: Uint8Array }>(
(acc, [key, value]) => {
if (value !== undefined) {
acc[Number(key)] = value;
}
return acc;
},
{},
);
return message;
},
};
function createBaseDbTrieNode_ChildHashesEntry(): DbTrieNode_ChildHashesEntry {
return { key: 0, value: new Uint8Array() };
}
export const DbTrieNode_ChildHashesEntry = {
encode(message: DbTrieNode_ChildHashesEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.key !== 0) {
writer.uint32(8).uint32(message.key);
}
if (message.value.length !== 0) {
writer.uint32(18).bytes(message.value);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): DbTrieNode_ChildHashesEntry {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseDbTrieNode_ChildHashesEntry();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.key = reader.uint32();
continue;
case 2:
if (tag != 18) {
break;
}
message.value = reader.bytes();
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): DbTrieNode_ChildHashesEntry {
return {
key: isSet(object.key) ? Number(object.key) : 0,
value: isSet(object.value) ? bytesFromBase64(object.value) : new Uint8Array(),
};
},
toJSON(message: DbTrieNode_ChildHashesEntry): unknown {
const obj: any = {};
message.key !== undefined && (obj.key = Math.round(message.key));
message.value !== undefined &&
(obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array()));
return obj;
},
create<I extends Exact<DeepPartial<DbTrieNode_ChildHashesEntry>, I>>(base?: I): DbTrieNode_ChildHashesEntry {
return DbTrieNode_ChildHashesEntry.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<DbTrieNode_ChildHashesEntry>, I>>(object: I): DbTrieNode_ChildHashesEntry {
const message = createBaseDbTrieNode_ChildHashesEntry();
message.key = object.key ?? 0;
message.value = object.value ?? new Uint8Array();
return message;
},
};
@@ -177,6 +274,10 @@ type KeysOfUnion<T> = T extends T ? keyof T : never;
type Exact<P, I extends P> = P extends Builtin ? P
: P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
function isObject(value: any): boolean {
return typeof value === "object" && value !== null;
}
function isSet(value: any): boolean {
return value !== null && value !== undefined;
}

View File

@@ -253,3 +253,9 @@ export const isMergeUsernameProofHubEvent = (
typeof event.mergeUsernameProofBody.deletedUsernameProof !== "undefined")
);
};
export const isBlockConfirmedHubEvent = (event: hubEventProtobufs.HubEvent): event is types.BlockConfirmedHubEvent => {
return (
event.type === hubEventProtobufs.HubEventType.BLOCK_CONFIRMED && typeof event.blockConfirmedBody !== "undefined"
);
};

View File

@@ -190,3 +190,8 @@ export type MergeFailureHubEvent = hubEventProtobufs.HubEvent & {
type: hubEventProtobufs.HubEventType.MERGE_FAILURE;
mergeFailure: hubEventProtobufs.MergeFailureBody;
};
export type BlockConfirmedHubEvent = hubEventProtobufs.HubEvent & {
type: hubEventProtobufs.HubEventType.BLOCK_CONFIRMED;
mergeFailure: hubEventProtobufs.BlockConfirmedBody;
};

View File

@@ -28,7 +28,7 @@
"scripts": {
"build": "tsup --config tsup.config.ts",
"clean": "rimraf ./dist",
"protoc": "docker run --rm --user $(id -u):$(id -g) -v $(pwd)/../../node_modules:/node_modules -v $(pwd)/../../protobufs/schemas:/defs -v $(pwd)/src/generated:/out namely/protoc:1.50_1 --plugin=/node_modules/ts-proto/protoc-gen-ts_proto --ts_proto_out=/out --ts_proto_opt=esModuleInterop=true,exportCommonSymbols=false,outputServices=grpc-js,useOptionals=none,unrecognizedEnum=false,removeEnumPrefix=true --proto_path=/defs /defs/rpc.proto",
"protoc": "LIBRARY=hub-nodejs ../../generate-protos.sh",
"lint": "biome format src/ examples/ --write && biome check src/ examples --apply",
"lint:ci": "biome ci src/ examples/",
"test": "NODE_OPTIONS=--experimental-vm-modules jest",

View File

@@ -1,4 +1,5 @@
import { AdminServiceClient, HubServiceClient } from "./generated/rpc";
import { HubServiceClient } from "./generated/rpc";
import { AdminServiceClient } from "./generated/admin_rpc";
import * as grpc from "@grpc/grpc-js";
import { Metadata } from "@grpc/grpc-js";
import type {

View File

@@ -0,0 +1,456 @@
/* eslint-disable */
import {
CallOptions,
ChannelCredentials,
Client,
ClientOptions,
ClientUnaryCall,
handleUnaryCall,
makeGenericClientConstructor,
Metadata,
ServiceError,
UntypedServiceImplementation,
} from "@grpc/grpc-js";
import Long from "long";
import _m0 from "protobufjs/minimal";
import { OnChainEvent } from "./onchain_event";
import { UserNameProof } from "./username_proof";
export interface Empty {
}
export interface RetryBlockNumberRange {
startBlockNumber: number;
stopBlockNumber: number;
}
export interface RetryOnchainEventsRequest {
fid?: number | undefined;
blockRange?: RetryBlockNumberRange | undefined;
}
export interface UploadSnapshotRequest {
shardIndexes: number[];
}
function createBaseEmpty(): Empty {
return {};
}
export const Empty = {
encode(_: Empty, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): Empty {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseEmpty();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(_: any): Empty {
return {};
},
toJSON(_: Empty): unknown {
const obj: any = {};
return obj;
},
create<I extends Exact<DeepPartial<Empty>, I>>(base?: I): Empty {
return Empty.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<Empty>, I>>(_: I): Empty {
const message = createBaseEmpty();
return message;
},
};
function createBaseRetryBlockNumberRange(): RetryBlockNumberRange {
return { startBlockNumber: 0, stopBlockNumber: 0 };
}
export const RetryBlockNumberRange = {
encode(message: RetryBlockNumberRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.startBlockNumber !== 0) {
writer.uint32(8).uint64(message.startBlockNumber);
}
if (message.stopBlockNumber !== 0) {
writer.uint32(16).uint64(message.stopBlockNumber);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): RetryBlockNumberRange {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseRetryBlockNumberRange();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.startBlockNumber = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 16) {
break;
}
message.stopBlockNumber = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): RetryBlockNumberRange {
return {
startBlockNumber: isSet(object.startBlockNumber) ? Number(object.startBlockNumber) : 0,
stopBlockNumber: isSet(object.stopBlockNumber) ? Number(object.stopBlockNumber) : 0,
};
},
toJSON(message: RetryBlockNumberRange): unknown {
const obj: any = {};
message.startBlockNumber !== undefined && (obj.startBlockNumber = Math.round(message.startBlockNumber));
message.stopBlockNumber !== undefined && (obj.stopBlockNumber = Math.round(message.stopBlockNumber));
return obj;
},
create<I extends Exact<DeepPartial<RetryBlockNumberRange>, I>>(base?: I): RetryBlockNumberRange {
return RetryBlockNumberRange.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<RetryBlockNumberRange>, I>>(object: I): RetryBlockNumberRange {
const message = createBaseRetryBlockNumberRange();
message.startBlockNumber = object.startBlockNumber ?? 0;
message.stopBlockNumber = object.stopBlockNumber ?? 0;
return message;
},
};
function createBaseRetryOnchainEventsRequest(): RetryOnchainEventsRequest {
return { fid: undefined, blockRange: undefined };
}
export const RetryOnchainEventsRequest = {
encode(message: RetryOnchainEventsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.fid !== undefined) {
writer.uint32(8).uint64(message.fid);
}
if (message.blockRange !== undefined) {
RetryBlockNumberRange.encode(message.blockRange, writer.uint32(18).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): RetryOnchainEventsRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseRetryOnchainEventsRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.fid = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 18) {
break;
}
message.blockRange = RetryBlockNumberRange.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): RetryOnchainEventsRequest {
return {
fid: isSet(object.fid) ? Number(object.fid) : undefined,
blockRange: isSet(object.blockRange) ? RetryBlockNumberRange.fromJSON(object.blockRange) : undefined,
};
},
toJSON(message: RetryOnchainEventsRequest): unknown {
const obj: any = {};
message.fid !== undefined && (obj.fid = Math.round(message.fid));
message.blockRange !== undefined &&
(obj.blockRange = message.blockRange ? RetryBlockNumberRange.toJSON(message.blockRange) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<RetryOnchainEventsRequest>, I>>(base?: I): RetryOnchainEventsRequest {
return RetryOnchainEventsRequest.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<RetryOnchainEventsRequest>, I>>(object: I): RetryOnchainEventsRequest {
const message = createBaseRetryOnchainEventsRequest();
message.fid = object.fid ?? undefined;
message.blockRange = (object.blockRange !== undefined && object.blockRange !== null)
? RetryBlockNumberRange.fromPartial(object.blockRange)
: undefined;
return message;
},
};
function createBaseUploadSnapshotRequest(): UploadSnapshotRequest {
return { shardIndexes: [] };
}
export const UploadSnapshotRequest = {
encode(message: UploadSnapshotRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
writer.uint32(10).fork();
for (const v of message.shardIndexes) {
writer.uint32(v);
}
writer.ldelim();
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): UploadSnapshotRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseUploadSnapshotRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag == 8) {
message.shardIndexes.push(reader.uint32());
continue;
}
if (tag == 10) {
const end2 = reader.uint32() + reader.pos;
while (reader.pos < end2) {
message.shardIndexes.push(reader.uint32());
}
continue;
}
break;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): UploadSnapshotRequest {
return { shardIndexes: Array.isArray(object?.shardIndexes) ? object.shardIndexes.map((e: any) => Number(e)) : [] };
},
toJSON(message: UploadSnapshotRequest): unknown {
const obj: any = {};
if (message.shardIndexes) {
obj.shardIndexes = message.shardIndexes.map((e) => Math.round(e));
} else {
obj.shardIndexes = [];
}
return obj;
},
create<I extends Exact<DeepPartial<UploadSnapshotRequest>, I>>(base?: I): UploadSnapshotRequest {
return UploadSnapshotRequest.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<UploadSnapshotRequest>, I>>(object: I): UploadSnapshotRequest {
const message = createBaseUploadSnapshotRequest();
message.shardIndexes = object.shardIndexes?.map((e) => e) || [];
return message;
},
};
export type AdminServiceService = typeof AdminServiceService;
export const AdminServiceService = {
submitOnChainEvent: {
path: "/AdminService/SubmitOnChainEvent",
requestStream: false,
responseStream: false,
requestSerialize: (value: OnChainEvent) => Buffer.from(OnChainEvent.encode(value).finish()),
requestDeserialize: (value: Buffer) => OnChainEvent.decode(value),
responseSerialize: (value: OnChainEvent) => Buffer.from(OnChainEvent.encode(value).finish()),
responseDeserialize: (value: Buffer) => OnChainEvent.decode(value),
},
submitUserNameProof: {
path: "/AdminService/SubmitUserNameProof",
requestStream: false,
responseStream: false,
requestSerialize: (value: UserNameProof) => Buffer.from(UserNameProof.encode(value).finish()),
requestDeserialize: (value: Buffer) => UserNameProof.decode(value),
responseSerialize: (value: UserNameProof) => Buffer.from(UserNameProof.encode(value).finish()),
responseDeserialize: (value: Buffer) => UserNameProof.decode(value),
},
uploadSnapshot: {
path: "/AdminService/UploadSnapshot",
requestStream: false,
responseStream: false,
requestSerialize: (value: UploadSnapshotRequest) => Buffer.from(UploadSnapshotRequest.encode(value).finish()),
requestDeserialize: (value: Buffer) => UploadSnapshotRequest.decode(value),
responseSerialize: (value: Empty) => Buffer.from(Empty.encode(value).finish()),
responseDeserialize: (value: Buffer) => Empty.decode(value),
},
retryOnchainEvents: {
path: "/AdminService/RetryOnchainEvents",
requestStream: false,
responseStream: false,
requestSerialize: (value: RetryOnchainEventsRequest) =>
Buffer.from(RetryOnchainEventsRequest.encode(value).finish()),
requestDeserialize: (value: Buffer) => RetryOnchainEventsRequest.decode(value),
responseSerialize: (value: Empty) => Buffer.from(Empty.encode(value).finish()),
responseDeserialize: (value: Buffer) => Empty.decode(value),
},
} as const;
export interface AdminServiceServer extends UntypedServiceImplementation {
submitOnChainEvent: handleUnaryCall<OnChainEvent, OnChainEvent>;
submitUserNameProof: handleUnaryCall<UserNameProof, UserNameProof>;
uploadSnapshot: handleUnaryCall<UploadSnapshotRequest, Empty>;
retryOnchainEvents: handleUnaryCall<RetryOnchainEventsRequest, Empty>;
}
export interface AdminServiceClient extends Client {
submitOnChainEvent(
request: OnChainEvent,
callback: (error: ServiceError | null, response: OnChainEvent) => void,
): ClientUnaryCall;
submitOnChainEvent(
request: OnChainEvent,
metadata: Metadata,
callback: (error: ServiceError | null, response: OnChainEvent) => void,
): ClientUnaryCall;
submitOnChainEvent(
request: OnChainEvent,
metadata: Metadata,
options: Partial<CallOptions>,
callback: (error: ServiceError | null, response: OnChainEvent) => void,
): ClientUnaryCall;
submitUserNameProof(
request: UserNameProof,
callback: (error: ServiceError | null, response: UserNameProof) => void,
): ClientUnaryCall;
submitUserNameProof(
request: UserNameProof,
metadata: Metadata,
callback: (error: ServiceError | null, response: UserNameProof) => void,
): ClientUnaryCall;
submitUserNameProof(
request: UserNameProof,
metadata: Metadata,
options: Partial<CallOptions>,
callback: (error: ServiceError | null, response: UserNameProof) => void,
): ClientUnaryCall;
uploadSnapshot(
request: UploadSnapshotRequest,
callback: (error: ServiceError | null, response: Empty) => void,
): ClientUnaryCall;
uploadSnapshot(
request: UploadSnapshotRequest,
metadata: Metadata,
callback: (error: ServiceError | null, response: Empty) => void,
): ClientUnaryCall;
uploadSnapshot(
request: UploadSnapshotRequest,
metadata: Metadata,
options: Partial<CallOptions>,
callback: (error: ServiceError | null, response: Empty) => void,
): ClientUnaryCall;
retryOnchainEvents(
request: RetryOnchainEventsRequest,
callback: (error: ServiceError | null, response: Empty) => void,
): ClientUnaryCall;
retryOnchainEvents(
request: RetryOnchainEventsRequest,
metadata: Metadata,
callback: (error: ServiceError | null, response: Empty) => void,
): ClientUnaryCall;
retryOnchainEvents(
request: RetryOnchainEventsRequest,
metadata: Metadata,
options: Partial<CallOptions>,
callback: (error: ServiceError | null, response: Empty) => void,
): ClientUnaryCall;
}
export const AdminServiceClient = makeGenericClientConstructor(AdminServiceService, "AdminService") as unknown as {
new (address: string, credentials: ChannelCredentials, options?: Partial<ClientOptions>): AdminServiceClient;
service: typeof AdminServiceService;
};
declare var self: any | undefined;
declare var window: any | undefined;
declare var global: any | undefined;
var tsProtoGlobalThis: any = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
if (typeof self !== "undefined") {
return self;
}
if (typeof window !== "undefined") {
return window;
}
if (typeof global !== "undefined") {
return global;
}
throw "Unable to locate global object";
})();
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T
: T extends Array<infer U> ? Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
: T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
: Partial<T>;
type KeysOfUnion<T> = T extends T ? keyof T : never;
type Exact<P, I extends P> = P extends Builtin ? P
: P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
function longToNumber(long: Long): number {
if (long.gt(Number.MAX_SAFE_INTEGER)) {
throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER");
}
return long.toNumber();
}
if (_m0.util.Long !== Long) {
_m0.util.Long = Long as any;
_m0.configure();
}
function isSet(value: any): boolean {
return value !== null && value !== undefined;
}

File diff suppressed because it is too large.

View File

@@ -23,6 +23,7 @@ export enum HubEventType {
*/
MERGE_ON_CHAIN_EVENT = 9,
MERGE_FAILURE = 10,
BLOCK_CONFIRMED = 11,
}
export function hubEventTypeFromJSON(object: any): HubEventType {
@@ -48,6 +49,9 @@ export function hubEventTypeFromJSON(object: any): HubEventType {
case 10:
case "HUB_EVENT_TYPE_MERGE_FAILURE":
return HubEventType.MERGE_FAILURE;
case 11:
case "HUB_EVENT_TYPE_BLOCK_CONFIRMED":
return HubEventType.BLOCK_CONFIRMED;
default:
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HubEventType");
}
@@ -69,6 +73,8 @@ export function hubEventTypeToJSON(object: HubEventType): string {
return "HUB_EVENT_TYPE_MERGE_ON_CHAIN_EVENT";
case HubEventType.MERGE_FAILURE:
return "HUB_EVENT_TYPE_MERGE_FAILURE";
case HubEventType.BLOCK_CONFIRMED:
return "HUB_EVENT_TYPE_BLOCK_CONFIRMED";
default:
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HubEventType");
}
@@ -79,20 +85,28 @@ export interface MergeMessageBody {
deletedMessages: Message[];
}
export interface PruneMessageBody {
message: Message | undefined;
}
export interface MergeFailureBody {
message: Message | undefined;
code: string;
reason: string;
}
export interface PruneMessageBody {
message: Message | undefined;
}
export interface RevokeMessageBody {
message: Message | undefined;
}
export interface BlockConfirmedBody {
blockNumber: number;
shardIndex: number;
timestamp: number;
blockHash: Uint8Array;
totalEvents: number;
}
export interface MergeOnChainEventBody {
onChainEvent: OnChainEvent | undefined;
}
@@ -127,6 +141,7 @@ export interface HubEvent {
*/
mergeOnChainEventBody?: MergeOnChainEventBody | undefined;
mergeFailure?: MergeFailureBody | undefined;
blockConfirmedBody?: BlockConfirmedBody | undefined;
blockNumber: number;
shardIndex: number;
timestamp: number;
@@ -211,64 +226,6 @@ export const MergeMessageBody = {
},
};
function createBasePruneMessageBody(): PruneMessageBody {
return { message: undefined };
}
export const PruneMessageBody = {
encode(message: PruneMessageBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.message !== undefined) {
Message.encode(message.message, writer.uint32(10).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): PruneMessageBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBasePruneMessageBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 10) {
break;
}
message.message = Message.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): PruneMessageBody {
return { message: isSet(object.message) ? Message.fromJSON(object.message) : undefined };
},
toJSON(message: PruneMessageBody): unknown {
const obj: any = {};
message.message !== undefined && (obj.message = message.message ? Message.toJSON(message.message) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<PruneMessageBody>, I>>(base?: I): PruneMessageBody {
return PruneMessageBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<PruneMessageBody>, I>>(object: I): PruneMessageBody {
const message = createBasePruneMessageBody();
message.message = (object.message !== undefined && object.message !== null)
? Message.fromPartial(object.message)
: undefined;
return message;
},
};
function createBaseMergeFailureBody(): MergeFailureBody {
return { message: undefined, code: "", reason: "" };
}
@@ -355,6 +312,64 @@ export const MergeFailureBody = {
},
};
function createBasePruneMessageBody(): PruneMessageBody {
return { message: undefined };
}
export const PruneMessageBody = {
encode(message: PruneMessageBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.message !== undefined) {
Message.encode(message.message, writer.uint32(10).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): PruneMessageBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBasePruneMessageBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 10) {
break;
}
message.message = Message.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): PruneMessageBody {
return { message: isSet(object.message) ? Message.fromJSON(object.message) : undefined };
},
toJSON(message: PruneMessageBody): unknown {
const obj: any = {};
message.message !== undefined && (obj.message = message.message ? Message.toJSON(message.message) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<PruneMessageBody>, I>>(base?: I): PruneMessageBody {
return PruneMessageBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<PruneMessageBody>, I>>(object: I): PruneMessageBody {
const message = createBasePruneMessageBody();
message.message = (object.message !== undefined && object.message !== null)
? Message.fromPartial(object.message)
: undefined;
return message;
},
};
function createBaseRevokeMessageBody(): RevokeMessageBody {
return { message: undefined };
}
@@ -413,6 +428,117 @@ export const RevokeMessageBody = {
},
};
function createBaseBlockConfirmedBody(): BlockConfirmedBody {
return { blockNumber: 0, shardIndex: 0, timestamp: 0, blockHash: new Uint8Array(), totalEvents: 0 };
}
export const BlockConfirmedBody = {
encode(message: BlockConfirmedBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.blockNumber !== 0) {
writer.uint32(8).uint64(message.blockNumber);
}
if (message.shardIndex !== 0) {
writer.uint32(16).uint32(message.shardIndex);
}
if (message.timestamp !== 0) {
writer.uint32(24).uint64(message.timestamp);
}
if (message.blockHash.length !== 0) {
writer.uint32(34).bytes(message.blockHash);
}
if (message.totalEvents !== 0) {
writer.uint32(40).uint64(message.totalEvents);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): BlockConfirmedBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseBlockConfirmedBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.blockNumber = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 16) {
break;
}
message.shardIndex = reader.uint32();
continue;
case 3:
if (tag != 24) {
break;
}
message.timestamp = longToNumber(reader.uint64() as Long);
continue;
case 4:
if (tag != 34) {
break;
}
message.blockHash = reader.bytes();
continue;
case 5:
if (tag != 40) {
break;
}
message.totalEvents = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): BlockConfirmedBody {
return {
blockNumber: isSet(object.blockNumber) ? Number(object.blockNumber) : 0,
shardIndex: isSet(object.shardIndex) ? Number(object.shardIndex) : 0,
timestamp: isSet(object.timestamp) ? Number(object.timestamp) : 0,
blockHash: isSet(object.blockHash) ? bytesFromBase64(object.blockHash) : new Uint8Array(),
totalEvents: isSet(object.totalEvents) ? Number(object.totalEvents) : 0,
};
},
toJSON(message: BlockConfirmedBody): unknown {
const obj: any = {};
message.blockNumber !== undefined && (obj.blockNumber = Math.round(message.blockNumber));
message.shardIndex !== undefined && (obj.shardIndex = Math.round(message.shardIndex));
message.timestamp !== undefined && (obj.timestamp = Math.round(message.timestamp));
message.blockHash !== undefined &&
(obj.blockHash = base64FromBytes(message.blockHash !== undefined ? message.blockHash : new Uint8Array()));
message.totalEvents !== undefined && (obj.totalEvents = Math.round(message.totalEvents));
return obj;
},
create<I extends Exact<DeepPartial<BlockConfirmedBody>, I>>(base?: I): BlockConfirmedBody {
return BlockConfirmedBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<BlockConfirmedBody>, I>>(object: I): BlockConfirmedBody {
const message = createBaseBlockConfirmedBody();
message.blockNumber = object.blockNumber ?? 0;
message.shardIndex = object.shardIndex ?? 0;
message.timestamp = object.timestamp ?? 0;
message.blockHash = object.blockHash ?? new Uint8Array();
message.totalEvents = object.totalEvents ?? 0;
return message;
},
};
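
The generated `BlockConfirmedBody` codec mirrors the other event bodies: the `uint64` fields come back through `longToNumber`, and the JSON helpers carry `blockHash` as base64. A minimal round-trip sketch, assuming the module is imported from this package's generated output (the import path is illustrative):

```typescript
import { BlockConfirmedBody } from "./hub_event"; // illustrative path into the generated output

// fromPartial fills unset fields with their defaults (0 / empty bytes).
const body = BlockConfirmedBody.fromPartial({
  blockNumber: 1234,
  shardIndex: 1,
  timestamp: 1700000000,
  blockHash: new Uint8Array([0xde, 0xad, 0xbe, 0xef]),
  totalEvents: 7,
});

// Wire round trip: encode() returns a protobufjs Writer, finish() yields bytes.
const bytes = BlockConfirmedBody.encode(body).finish();
const decoded = BlockConfirmedBody.decode(bytes);

// JSON round trip: blockHash is serialized as a base64 string.
const json = BlockConfirmedBody.toJSON(decoded);
const restored = BlockConfirmedBody.fromJSON(json);
```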
function createBaseMergeOnChainEventBody(): MergeOnChainEventBody {
return { onChainEvent: undefined };
}
@@ -607,6 +733,7 @@ function createBaseHubEvent(): HubEvent {
mergeUsernameProofBody: undefined,
mergeOnChainEventBody: undefined,
mergeFailure: undefined,
blockConfirmedBody: undefined,
blockNumber: 0,
shardIndex: 0,
timestamp: 0,
@@ -639,6 +766,9 @@ export const HubEvent = {
if (message.mergeFailure !== undefined) {
MergeFailureBody.encode(message.mergeFailure, writer.uint32(106).fork()).ldelim();
}
if (message.blockConfirmedBody !== undefined) {
BlockConfirmedBody.encode(message.blockConfirmedBody, writer.uint32(130).fork()).ldelim();
}
if (message.blockNumber !== 0) {
writer.uint32(96).uint64(message.blockNumber);
}
@@ -714,6 +844,13 @@ export const HubEvent = {
message.mergeFailure = MergeFailureBody.decode(reader, reader.uint32());
continue;
case 16:
if (tag != 130) {
break;
}
message.blockConfirmedBody = BlockConfirmedBody.decode(reader, reader.uint32());
continue;
case 12:
if (tag != 96) {
break;
@@ -760,6 +897,9 @@ export const HubEvent = {
? MergeOnChainEventBody.fromJSON(object.mergeOnChainEventBody)
: undefined,
mergeFailure: isSet(object.mergeFailure) ? MergeFailureBody.fromJSON(object.mergeFailure) : undefined,
blockConfirmedBody: isSet(object.blockConfirmedBody)
? BlockConfirmedBody.fromJSON(object.blockConfirmedBody)
: undefined,
blockNumber: isSet(object.blockNumber) ? Number(object.blockNumber) : 0,
shardIndex: isSet(object.shardIndex) ? Number(object.shardIndex) : 0,
timestamp: isSet(object.timestamp) ? Number(object.timestamp) : 0,
@@ -785,6 +925,9 @@ export const HubEvent = {
: undefined);
message.mergeFailure !== undefined &&
(obj.mergeFailure = message.mergeFailure ? MergeFailureBody.toJSON(message.mergeFailure) : undefined);
message.blockConfirmedBody !== undefined && (obj.blockConfirmedBody = message.blockConfirmedBody
? BlockConfirmedBody.toJSON(message.blockConfirmedBody)
: undefined);
message.blockNumber !== undefined && (obj.blockNumber = Math.round(message.blockNumber));
message.shardIndex !== undefined && (obj.shardIndex = Math.round(message.shardIndex));
message.timestamp !== undefined && (obj.timestamp = Math.round(message.timestamp));
@@ -819,6 +962,9 @@ export const HubEvent = {
message.mergeFailure = (object.mergeFailure !== undefined && object.mergeFailure !== null)
? MergeFailureBody.fromPartial(object.mergeFailure)
: undefined;
message.blockConfirmedBody = (object.blockConfirmedBody !== undefined && object.blockConfirmedBody !== null)
? BlockConfirmedBody.fromPartial(object.blockConfirmedBody)
: undefined;
message.blockNumber = object.blockNumber ?? 0;
message.shardIndex = object.shardIndex ?? 0;
message.timestamp = object.timestamp ?? 0;
@@ -845,6 +991,31 @@ var tsProtoGlobalThis: any = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64: string): Uint8Array {
if (tsProtoGlobalThis.Buffer) {
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
} else {
const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
}
return arr;
}
}
function base64FromBytes(arr: Uint8Array): string {
if (tsProtoGlobalThis.Buffer) {
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
} else {
const bin: string[] = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
return tsProtoGlobalThis.btoa(bin.join(""));
}
}
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T

View File

@@ -244,7 +244,7 @@ export enum UserDataType {
USERNAME = 6,
/** LOCATION - Current location for the user */
LOCATION = 7,
/** TWITTER - Username of user on x */
/** TWITTER - Username of user on twitter */
TWITTER = 8,
/** GITHUB - Username of user on github */
GITHUB = 9,

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -22,7 +22,7 @@
"scripts": {
"build": "tsup --config tsup.config.ts",
"clean": "rimraf ./dist",
"protoc": "docker run --rm --user $(id -u):$(id -g) -v $(pwd)/../../node_modules:/node_modules -v $(pwd)/../../protobufs/schemas:/defs -v $(pwd)/src/generated:/out namely/protoc:1.50_1 --plugin=/node_modules/ts-proto/protoc-gen-ts_proto --ts_proto_out=/out --ts_proto_opt=esModuleInterop=true,exportCommonSymbols=false,outputClientImpl=grpc-web,useOptionals=none,unrecognizedEnum=false,removeEnumPrefix=true,lowerCaseServiceMethods=true --proto_path=/defs /defs/rpc.proto",
"protoc": "LIBRARY=hub-web ../../generate-protos.sh",
"lint": "biome format src/ examples/ --write && biome check src/ examples/ --apply",
"lint:ci": "biome ci src/ examples/",
"prepublishOnly": "yarn run build"

View File

@@ -1,11 +1,6 @@
import {
HubService,
HubServiceClientImpl,
GrpcWebError,
GrpcWebImpl,
AdminService,
AdminServiceClientImpl,
} from "./generated/rpc";
import { HubService, HubServiceClientImpl, GrpcWebError, GrpcWebImpl } from "./generated/rpc";
import { AdminService, AdminServiceClientImpl } from "./generated/admin_rpc";
import grpcWeb from "@improbable-eng/grpc-web";
import { err, ok } from "neverthrow";

View File

@@ -0,0 +1,533 @@
/* eslint-disable */
import { grpc } from "@improbable-eng/grpc-web";
import { BrowserHeaders } from "browser-headers";
import Long from "long";
import _m0 from "protobufjs/minimal";
import { OnChainEvent } from "./onchain_event";
import { UserNameProof } from "./username_proof";
export interface Empty {
}
export interface RetryBlockNumberRange {
startBlockNumber: number;
stopBlockNumber: number;
}
export interface RetryOnchainEventsRequest {
fid?: number | undefined;
blockRange?: RetryBlockNumberRange | undefined;
}
export interface UploadSnapshotRequest {
shardIndexes: number[];
}
function createBaseEmpty(): Empty {
return {};
}
export const Empty = {
encode(_: Empty, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): Empty {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseEmpty();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(_: any): Empty {
return {};
},
toJSON(_: Empty): unknown {
const obj: any = {};
return obj;
},
create<I extends Exact<DeepPartial<Empty>, I>>(base?: I): Empty {
return Empty.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<Empty>, I>>(_: I): Empty {
const message = createBaseEmpty();
return message;
},
};
function createBaseRetryBlockNumberRange(): RetryBlockNumberRange {
return { startBlockNumber: 0, stopBlockNumber: 0 };
}
export const RetryBlockNumberRange = {
encode(message: RetryBlockNumberRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.startBlockNumber !== 0) {
writer.uint32(8).uint64(message.startBlockNumber);
}
if (message.stopBlockNumber !== 0) {
writer.uint32(16).uint64(message.stopBlockNumber);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): RetryBlockNumberRange {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseRetryBlockNumberRange();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.startBlockNumber = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 16) {
break;
}
message.stopBlockNumber = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): RetryBlockNumberRange {
return {
startBlockNumber: isSet(object.startBlockNumber) ? Number(object.startBlockNumber) : 0,
stopBlockNumber: isSet(object.stopBlockNumber) ? Number(object.stopBlockNumber) : 0,
};
},
toJSON(message: RetryBlockNumberRange): unknown {
const obj: any = {};
message.startBlockNumber !== undefined && (obj.startBlockNumber = Math.round(message.startBlockNumber));
message.stopBlockNumber !== undefined && (obj.stopBlockNumber = Math.round(message.stopBlockNumber));
return obj;
},
create<I extends Exact<DeepPartial<RetryBlockNumberRange>, I>>(base?: I): RetryBlockNumberRange {
return RetryBlockNumberRange.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<RetryBlockNumberRange>, I>>(object: I): RetryBlockNumberRange {
const message = createBaseRetryBlockNumberRange();
message.startBlockNumber = object.startBlockNumber ?? 0;
message.stopBlockNumber = object.stopBlockNumber ?? 0;
return message;
},
};
function createBaseRetryOnchainEventsRequest(): RetryOnchainEventsRequest {
return { fid: undefined, blockRange: undefined };
}
export const RetryOnchainEventsRequest = {
encode(message: RetryOnchainEventsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.fid !== undefined) {
writer.uint32(8).uint64(message.fid);
}
if (message.blockRange !== undefined) {
RetryBlockNumberRange.encode(message.blockRange, writer.uint32(18).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): RetryOnchainEventsRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseRetryOnchainEventsRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.fid = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 18) {
break;
}
message.blockRange = RetryBlockNumberRange.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): RetryOnchainEventsRequest {
return {
fid: isSet(object.fid) ? Number(object.fid) : undefined,
blockRange: isSet(object.blockRange) ? RetryBlockNumberRange.fromJSON(object.blockRange) : undefined,
};
},
toJSON(message: RetryOnchainEventsRequest): unknown {
const obj: any = {};
message.fid !== undefined && (obj.fid = Math.round(message.fid));
message.blockRange !== undefined &&
(obj.blockRange = message.blockRange ? RetryBlockNumberRange.toJSON(message.blockRange) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<RetryOnchainEventsRequest>, I>>(base?: I): RetryOnchainEventsRequest {
return RetryOnchainEventsRequest.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<RetryOnchainEventsRequest>, I>>(object: I): RetryOnchainEventsRequest {
const message = createBaseRetryOnchainEventsRequest();
message.fid = object.fid ?? undefined;
message.blockRange = (object.blockRange !== undefined && object.blockRange !== null)
? RetryBlockNumberRange.fromPartial(object.blockRange)
: undefined;
return message;
},
};
function createBaseUploadSnapshotRequest(): UploadSnapshotRequest {
return { shardIndexes: [] };
}
export const UploadSnapshotRequest = {
encode(message: UploadSnapshotRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
writer.uint32(10).fork();
for (const v of message.shardIndexes) {
writer.uint32(v);
}
writer.ldelim();
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): UploadSnapshotRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseUploadSnapshotRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag == 8) {
message.shardIndexes.push(reader.uint32());
continue;
}
if (tag == 10) {
const end2 = reader.uint32() + reader.pos;
while (reader.pos < end2) {
message.shardIndexes.push(reader.uint32());
}
continue;
}
break;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): UploadSnapshotRequest {
return { shardIndexes: Array.isArray(object?.shardIndexes) ? object.shardIndexes.map((e: any) => Number(e)) : [] };
},
toJSON(message: UploadSnapshotRequest): unknown {
const obj: any = {};
if (message.shardIndexes) {
obj.shardIndexes = message.shardIndexes.map((e) => Math.round(e));
} else {
obj.shardIndexes = [];
}
return obj;
},
create<I extends Exact<DeepPartial<UploadSnapshotRequest>, I>>(base?: I): UploadSnapshotRequest {
return UploadSnapshotRequest.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<UploadSnapshotRequest>, I>>(object: I): UploadSnapshotRequest {
const message = createBaseUploadSnapshotRequest();
message.shardIndexes = object.shardIndexes?.map((e) => e) || [];
return message;
},
};
export interface AdminService {
submitOnChainEvent(request: DeepPartial<OnChainEvent>, metadata?: grpc.Metadata): Promise<OnChainEvent>;
submitUserNameProof(request: DeepPartial<UserNameProof>, metadata?: grpc.Metadata): Promise<UserNameProof>;
uploadSnapshot(request: DeepPartial<UploadSnapshotRequest>, metadata?: grpc.Metadata): Promise<Empty>;
retryOnchainEvents(request: DeepPartial<RetryOnchainEventsRequest>, metadata?: grpc.Metadata): Promise<Empty>;
}
export class AdminServiceClientImpl implements AdminService {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.submitOnChainEvent = this.submitOnChainEvent.bind(this);
this.submitUserNameProof = this.submitUserNameProof.bind(this);
this.uploadSnapshot = this.uploadSnapshot.bind(this);
this.retryOnchainEvents = this.retryOnchainEvents.bind(this);
}
submitOnChainEvent(request: DeepPartial<OnChainEvent>, metadata?: grpc.Metadata): Promise<OnChainEvent> {
return this.rpc.unary(AdminServiceSubmitOnChainEventDesc, OnChainEvent.fromPartial(request), metadata);
}
submitUserNameProof(request: DeepPartial<UserNameProof>, metadata?: grpc.Metadata): Promise<UserNameProof> {
return this.rpc.unary(AdminServiceSubmitUserNameProofDesc, UserNameProof.fromPartial(request), metadata);
}
uploadSnapshot(request: DeepPartial<UploadSnapshotRequest>, metadata?: grpc.Metadata): Promise<Empty> {
return this.rpc.unary(AdminServiceUploadSnapshotDesc, UploadSnapshotRequest.fromPartial(request), metadata);
}
retryOnchainEvents(request: DeepPartial<RetryOnchainEventsRequest>, metadata?: grpc.Metadata): Promise<Empty> {
return this.rpc.unary(AdminServiceRetryOnchainEventsDesc, RetryOnchainEventsRequest.fromPartial(request), metadata);
}
}
export const AdminServiceDesc = { serviceName: "AdminService" };
export const AdminServiceSubmitOnChainEventDesc: UnaryMethodDefinitionish = {
methodName: "SubmitOnChainEvent",
service: AdminServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return OnChainEvent.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = OnChainEvent.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const AdminServiceSubmitUserNameProofDesc: UnaryMethodDefinitionish = {
methodName: "SubmitUserNameProof",
service: AdminServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return UserNameProof.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = UserNameProof.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const AdminServiceUploadSnapshotDesc: UnaryMethodDefinitionish = {
methodName: "UploadSnapshot",
service: AdminServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return UploadSnapshotRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = Empty.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const AdminServiceRetryOnchainEventsDesc: UnaryMethodDefinitionish = {
methodName: "RetryOnchainEvents",
service: AdminServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return RetryOnchainEventsRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = Empty.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
interface UnaryMethodDefinitionishR extends grpc.UnaryMethodDefinition<any, any> {
requestStream: any;
responseStream: any;
}
type UnaryMethodDefinitionish = UnaryMethodDefinitionishR;
interface Rpc {
unary<T extends UnaryMethodDefinitionish>(
methodDesc: T,
request: any,
metadata: grpc.Metadata | undefined,
): Promise<any>;
}
export class GrpcWebImpl {
private host: string;
private options: {
transport?: grpc.TransportFactory;
debug?: boolean;
metadata?: grpc.Metadata;
upStreamRetryCodes?: number[];
};
constructor(
host: string,
options: {
transport?: grpc.TransportFactory;
debug?: boolean;
metadata?: grpc.Metadata;
upStreamRetryCodes?: number[];
},
) {
this.host = host;
this.options = options;
}
unary<T extends UnaryMethodDefinitionish>(
methodDesc: T,
_request: any,
metadata: grpc.Metadata | undefined,
): Promise<any> {
const request = { ..._request, ...methodDesc.requestType };
const maybeCombinedMetadata = metadata && this.options.metadata
? new BrowserHeaders({ ...this.options?.metadata.headersMap, ...metadata?.headersMap })
: metadata || this.options.metadata;
return new Promise((resolve, reject) => {
grpc.unary(methodDesc, {
request,
host: this.host,
metadata: maybeCombinedMetadata,
transport: this.options.transport,
debug: this.options.debug,
onEnd: function (response) {
if (response.status === grpc.Code.OK) {
resolve(response.message!.toObject());
} else {
const err = new GrpcWebError(response.statusMessage, response.status, response.trailers);
reject(err);
}
},
});
});
}
}
declare var self: any | undefined;
declare var window: any | undefined;
declare var global: any | undefined;
var tsProtoGlobalThis: any = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
if (typeof self !== "undefined") {
return self;
}
if (typeof window !== "undefined") {
return window;
}
if (typeof global !== "undefined") {
return global;
}
throw "Unable to locate global object";
})();
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T
: T extends Array<infer U> ? Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
: T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
: Partial<T>;
type KeysOfUnion<T> = T extends T ? keyof T : never;
type Exact<P, I extends P> = P extends Builtin ? P
: P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
function longToNumber(long: Long): number {
if (long.gt(Number.MAX_SAFE_INTEGER)) {
throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER");
}
return long.toNumber();
}
if (_m0.util.Long !== Long) {
_m0.util.Long = Long as any;
_m0.configure();
}
function isSet(value: any): boolean {
return value !== null && value !== undefined;
}
export class GrpcWebError extends tsProtoGlobalThis.Error {
constructor(message: string, public code: grpc.Code, public metadata: grpc.Metadata) {
super(message);
}
}
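
Since the admin rpcs now live in their own generated module, callers construct the admin client separately from the hub client. A hedged usage sketch; the host and port are illustrative, and the retry/snapshot parameters are only examples:

```typescript
import { AdminServiceClientImpl, GrpcWebImpl } from "./generated/admin_rpc"; // illustrative path

async function runAdminCalls(): Promise<void> {
  // GrpcWebImpl wraps a grpc-web transport around the given host.
  const rpc = new GrpcWebImpl("https://snapchain.example.com:3383", {});
  const admin = new AdminServiceClientImpl(rpc);

  // Retry on-chain events for a single fid...
  await admin.retryOnchainEvents({ fid: 2 });

  // ...or for an explicit block range.
  await admin.retryOnchainEvents({
    blockRange: { startBlockNumber: 1000000, stopBlockNumber: 1000100 },
  });

  // Ask the node to upload snapshots for specific shards.
  await admin.uploadSnapshot({ shardIndexes: [0, 1] });
}
```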

File diff suppressed because it is too large

View File

@@ -23,6 +23,7 @@ export enum HubEventType {
*/
MERGE_ON_CHAIN_EVENT = 9,
MERGE_FAILURE = 10,
BLOCK_CONFIRMED = 11,
}
export function hubEventTypeFromJSON(object: any): HubEventType {
@@ -48,6 +49,9 @@ export function hubEventTypeFromJSON(object: any): HubEventType {
case 10:
case "HUB_EVENT_TYPE_MERGE_FAILURE":
return HubEventType.MERGE_FAILURE;
case 11:
case "HUB_EVENT_TYPE_BLOCK_CONFIRMED":
return HubEventType.BLOCK_CONFIRMED;
default:
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HubEventType");
}
@@ -69,6 +73,8 @@ export function hubEventTypeToJSON(object: HubEventType): string {
return "HUB_EVENT_TYPE_MERGE_ON_CHAIN_EVENT";
case HubEventType.MERGE_FAILURE:
return "HUB_EVENT_TYPE_MERGE_FAILURE";
case HubEventType.BLOCK_CONFIRMED:
return "HUB_EVENT_TYPE_BLOCK_CONFIRMED";
default:
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HubEventType");
}
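
With `BLOCK_CONFIRMED = 11` added to `HubEventType`, subscribers can branch on the new event and read its body. A minimal sketch, assuming `HubEvent` still carries its `type` field (unchanged in this diff) and that the import path points at the generated output:

```typescript
import { HubEvent, HubEventType } from "./hub_event"; // illustrative path

function handleEvent(event: HubEvent): void {
  // blockConfirmedBody is optional on HubEvent, so guard before reading it.
  if (event.type === HubEventType.BLOCK_CONFIRMED && event.blockConfirmedBody) {
    const { blockNumber, shardIndex, totalEvents } = event.blockConfirmedBody;
    console.log(`shard ${shardIndex}: block ${blockNumber} confirmed with ${totalEvents} events`);
  }
}
```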
@@ -79,20 +85,28 @@ export interface MergeMessageBody {
deletedMessages: Message[];
}
export interface PruneMessageBody {
message: Message | undefined;
}
export interface MergeFailureBody {
message: Message | undefined;
code: string;
reason: string;
}
export interface PruneMessageBody {
message: Message | undefined;
}
export interface RevokeMessageBody {
message: Message | undefined;
}
export interface BlockConfirmedBody {
blockNumber: number;
shardIndex: number;
timestamp: number;
blockHash: Uint8Array;
totalEvents: number;
}
export interface MergeOnChainEventBody {
onChainEvent: OnChainEvent | undefined;
}
@@ -127,6 +141,7 @@ export interface HubEvent {
*/
mergeOnChainEventBody?: MergeOnChainEventBody | undefined;
mergeFailure?: MergeFailureBody | undefined;
blockConfirmedBody?: BlockConfirmedBody | undefined;
blockNumber: number;
shardIndex: number;
timestamp: number;
@@ -211,64 +226,6 @@ export const MergeMessageBody = {
},
};
function createBasePruneMessageBody(): PruneMessageBody {
return { message: undefined };
}
export const PruneMessageBody = {
encode(message: PruneMessageBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.message !== undefined) {
Message.encode(message.message, writer.uint32(10).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): PruneMessageBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBasePruneMessageBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 10) {
break;
}
message.message = Message.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): PruneMessageBody {
return { message: isSet(object.message) ? Message.fromJSON(object.message) : undefined };
},
toJSON(message: PruneMessageBody): unknown {
const obj: any = {};
message.message !== undefined && (obj.message = message.message ? Message.toJSON(message.message) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<PruneMessageBody>, I>>(base?: I): PruneMessageBody {
return PruneMessageBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<PruneMessageBody>, I>>(object: I): PruneMessageBody {
const message = createBasePruneMessageBody();
message.message = (object.message !== undefined && object.message !== null)
? Message.fromPartial(object.message)
: undefined;
return message;
},
};
function createBaseMergeFailureBody(): MergeFailureBody {
return { message: undefined, code: "", reason: "" };
}
@@ -355,6 +312,64 @@ export const MergeFailureBody = {
},
};
function createBasePruneMessageBody(): PruneMessageBody {
return { message: undefined };
}
export const PruneMessageBody = {
encode(message: PruneMessageBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.message !== undefined) {
Message.encode(message.message, writer.uint32(10).fork()).ldelim();
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): PruneMessageBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBasePruneMessageBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 10) {
break;
}
message.message = Message.decode(reader, reader.uint32());
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): PruneMessageBody {
return { message: isSet(object.message) ? Message.fromJSON(object.message) : undefined };
},
toJSON(message: PruneMessageBody): unknown {
const obj: any = {};
message.message !== undefined && (obj.message = message.message ? Message.toJSON(message.message) : undefined);
return obj;
},
create<I extends Exact<DeepPartial<PruneMessageBody>, I>>(base?: I): PruneMessageBody {
return PruneMessageBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<PruneMessageBody>, I>>(object: I): PruneMessageBody {
const message = createBasePruneMessageBody();
message.message = (object.message !== undefined && object.message !== null)
? Message.fromPartial(object.message)
: undefined;
return message;
},
};
function createBaseRevokeMessageBody(): RevokeMessageBody {
return { message: undefined };
}
@@ -413,6 +428,117 @@ export const RevokeMessageBody = {
},
};
function createBaseBlockConfirmedBody(): BlockConfirmedBody {
return { blockNumber: 0, shardIndex: 0, timestamp: 0, blockHash: new Uint8Array(), totalEvents: 0 };
}
export const BlockConfirmedBody = {
encode(message: BlockConfirmedBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.blockNumber !== 0) {
writer.uint32(8).uint64(message.blockNumber);
}
if (message.shardIndex !== 0) {
writer.uint32(16).uint32(message.shardIndex);
}
if (message.timestamp !== 0) {
writer.uint32(24).uint64(message.timestamp);
}
if (message.blockHash.length !== 0) {
writer.uint32(34).bytes(message.blockHash);
}
if (message.totalEvents !== 0) {
writer.uint32(40).uint64(message.totalEvents);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): BlockConfirmedBody {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseBlockConfirmedBody();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag != 8) {
break;
}
message.blockNumber = longToNumber(reader.uint64() as Long);
continue;
case 2:
if (tag != 16) {
break;
}
message.shardIndex = reader.uint32();
continue;
case 3:
if (tag != 24) {
break;
}
message.timestamp = longToNumber(reader.uint64() as Long);
continue;
case 4:
if (tag != 34) {
break;
}
message.blockHash = reader.bytes();
continue;
case 5:
if (tag != 40) {
break;
}
message.totalEvents = longToNumber(reader.uint64() as Long);
continue;
}
if ((tag & 7) == 4 || tag == 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): BlockConfirmedBody {
return {
blockNumber: isSet(object.blockNumber) ? Number(object.blockNumber) : 0,
shardIndex: isSet(object.shardIndex) ? Number(object.shardIndex) : 0,
timestamp: isSet(object.timestamp) ? Number(object.timestamp) : 0,
blockHash: isSet(object.blockHash) ? bytesFromBase64(object.blockHash) : new Uint8Array(),
totalEvents: isSet(object.totalEvents) ? Number(object.totalEvents) : 0,
};
},
toJSON(message: BlockConfirmedBody): unknown {
const obj: any = {};
message.blockNumber !== undefined && (obj.blockNumber = Math.round(message.blockNumber));
message.shardIndex !== undefined && (obj.shardIndex = Math.round(message.shardIndex));
message.timestamp !== undefined && (obj.timestamp = Math.round(message.timestamp));
message.blockHash !== undefined &&
(obj.blockHash = base64FromBytes(message.blockHash !== undefined ? message.blockHash : new Uint8Array()));
message.totalEvents !== undefined && (obj.totalEvents = Math.round(message.totalEvents));
return obj;
},
create<I extends Exact<DeepPartial<BlockConfirmedBody>, I>>(base?: I): BlockConfirmedBody {
return BlockConfirmedBody.fromPartial(base ?? {});
},
fromPartial<I extends Exact<DeepPartial<BlockConfirmedBody>, I>>(object: I): BlockConfirmedBody {
const message = createBaseBlockConfirmedBody();
message.blockNumber = object.blockNumber ?? 0;
message.shardIndex = object.shardIndex ?? 0;
message.timestamp = object.timestamp ?? 0;
message.blockHash = object.blockHash ?? new Uint8Array();
message.totalEvents = object.totalEvents ?? 0;
return message;
},
};
function createBaseMergeOnChainEventBody(): MergeOnChainEventBody {
return { onChainEvent: undefined };
}
@@ -607,6 +733,7 @@ function createBaseHubEvent(): HubEvent {
mergeUsernameProofBody: undefined,
mergeOnChainEventBody: undefined,
mergeFailure: undefined,
blockConfirmedBody: undefined,
blockNumber: 0,
shardIndex: 0,
timestamp: 0,
@@ -639,6 +766,9 @@ export const HubEvent = {
if (message.mergeFailure !== undefined) {
MergeFailureBody.encode(message.mergeFailure, writer.uint32(106).fork()).ldelim();
}
if (message.blockConfirmedBody !== undefined) {
BlockConfirmedBody.encode(message.blockConfirmedBody, writer.uint32(130).fork()).ldelim();
}
if (message.blockNumber !== 0) {
writer.uint32(96).uint64(message.blockNumber);
}
@@ -714,6 +844,13 @@ export const HubEvent = {
message.mergeFailure = MergeFailureBody.decode(reader, reader.uint32());
continue;
case 16:
if (tag != 130) {
break;
}
message.blockConfirmedBody = BlockConfirmedBody.decode(reader, reader.uint32());
continue;
case 12:
if (tag != 96) {
break;
@@ -760,6 +897,9 @@ export const HubEvent = {
? MergeOnChainEventBody.fromJSON(object.mergeOnChainEventBody)
: undefined,
mergeFailure: isSet(object.mergeFailure) ? MergeFailureBody.fromJSON(object.mergeFailure) : undefined,
blockConfirmedBody: isSet(object.blockConfirmedBody)
? BlockConfirmedBody.fromJSON(object.blockConfirmedBody)
: undefined,
blockNumber: isSet(object.blockNumber) ? Number(object.blockNumber) : 0,
shardIndex: isSet(object.shardIndex) ? Number(object.shardIndex) : 0,
timestamp: isSet(object.timestamp) ? Number(object.timestamp) : 0,
@@ -785,6 +925,9 @@ export const HubEvent = {
: undefined);
message.mergeFailure !== undefined &&
(obj.mergeFailure = message.mergeFailure ? MergeFailureBody.toJSON(message.mergeFailure) : undefined);
message.blockConfirmedBody !== undefined && (obj.blockConfirmedBody = message.blockConfirmedBody
? BlockConfirmedBody.toJSON(message.blockConfirmedBody)
: undefined);
message.blockNumber !== undefined && (obj.blockNumber = Math.round(message.blockNumber));
message.shardIndex !== undefined && (obj.shardIndex = Math.round(message.shardIndex));
message.timestamp !== undefined && (obj.timestamp = Math.round(message.timestamp));
@@ -819,6 +962,9 @@ export const HubEvent = {
message.mergeFailure = (object.mergeFailure !== undefined && object.mergeFailure !== null)
? MergeFailureBody.fromPartial(object.mergeFailure)
: undefined;
message.blockConfirmedBody = (object.blockConfirmedBody !== undefined && object.blockConfirmedBody !== null)
? BlockConfirmedBody.fromPartial(object.blockConfirmedBody)
: undefined;
message.blockNumber = object.blockNumber ?? 0;
message.shardIndex = object.shardIndex ?? 0;
message.timestamp = object.timestamp ?? 0;
@@ -845,6 +991,31 @@ var tsProtoGlobalThis: any = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64: string): Uint8Array {
if (tsProtoGlobalThis.Buffer) {
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
} else {
const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
}
return arr;
}
}
function base64FromBytes(arr: Uint8Array): string {
if (tsProtoGlobalThis.Buffer) {
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
} else {
const bin: string[] = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
return tsProtoGlobalThis.btoa(bin.join(""));
}
}
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T

View File

@@ -244,7 +244,7 @@ export enum UserDataType {
USERNAME = 6,
/** LOCATION - Current location for the user */
LOCATION = 7,
/** TWITTER - Username of user on x */
/** TWITTER - Username of user on twitter */
TWITTER = 8,
/** GITHUB - Username of user on github */
GITHUB = 9,

File diff suppressed because it is too large

View File

@@ -3,20 +3,24 @@ import grpcWeb from "@improbable-eng/grpc-web";
import { BrowserHeaders } from "browser-headers";
import { Observable } from "rxjs";
import { share } from "rxjs/operators";
import { Block } from "./blocks";
import { HubEvent } from "./hub_event";
import { CastId, Message } from "./message";
import { OnChainEvent } from "./onchain_event";
import {
BlocksRequest,
CastsByParentRequest,
ContactInfoResponse,
Empty,
EventRequest,
EventsRequest,
EventsResponse,
FidAddressTypeRequest,
FidAddressTypeResponse,
FidRequest,
FidsRequest,
FidsResponse,
FidTimestampRequest,
HubInfoRequest,
HubInfoResponse,
GetInfoRequest,
GetInfoResponse,
IdRegistryEventByAddressRequest,
LinkRequest,
LinksByFidRequest,
@@ -27,21 +31,13 @@ import {
ReactionRequest,
ReactionsByFidRequest,
ReactionsByTargetRequest,
ShardChunksRequest,
ShardChunksResponse,
SignerRequest,
StorageLimitsResponse,
StreamFetchRequest,
StreamFetchResponse,
StreamSyncRequest,
StreamSyncResponse,
SubmitBulkMessagesRequest,
SubmitBulkMessagesResponse,
SubscribeRequest,
SyncIds,
SyncStatusRequest,
SyncStatusResponse,
TrieNodeMetadataRequest,
TrieNodeMetadataResponse,
TrieNodePrefix,
TrieNodeSnapshotResponse,
UserDataRequest,
UsernameProofRequest,
UsernameProofsResponse,
@@ -51,29 +47,25 @@ import {
import { UserNameProof } from "./username_proof";
export interface HubService {
/** Submit Methods */
/** Write API */
submitMessage(request: DeepPartial<Message>, metadata?: grpcWeb.grpc.Metadata): Promise<Message>;
/** Validation Methods */
validateMessage(request: DeepPartial<Message>, metadata?: grpcWeb.grpc.Metadata): Promise<ValidationResponse>;
/**
* Event Methods
* @http-api: none
*/
/** Block API */
getBlocks(request: DeepPartial<BlocksRequest>, metadata?: grpcWeb.grpc.Metadata): Observable<Block>;
getShardChunks(request: DeepPartial<ShardChunksRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<ShardChunksResponse>;
getInfo(request: DeepPartial<GetInfoRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<GetInfoResponse>;
getFids(request: DeepPartial<FidsRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<FidsResponse>;
/** Events */
subscribe(request: DeepPartial<SubscribeRequest>, metadata?: grpcWeb.grpc.Metadata): Observable<HubEvent>;
/** @http-api: events */
getEvent(request: DeepPartial<EventRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<HubEvent>;
/**
* Casts
* @http-api: castById
*/
getEvents(request: DeepPartial<EventsRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<EventsResponse>;
/** Casts */
getCast(request: DeepPartial<CastId>, metadata?: grpcWeb.grpc.Metadata): Promise<Message>;
getCastsByFid(request: DeepPartial<FidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
getCastsByParent(request: DeepPartial<CastsByParentRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
getCastsByMention(request: DeepPartial<FidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
/**
* Reactions
* @http-api: reactionById
*/
/** Reactions */
getReaction(request: DeepPartial<ReactionRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<Message>;
getReactionsByFid(request: DeepPartial<ReactionsByFidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
/** To be deprecated */
@@ -85,129 +77,65 @@ export interface HubService {
request: DeepPartial<ReactionsByTargetRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
/**
* User Data
* @http-api: none
*/
/** User Data */
getUserData(request: DeepPartial<UserDataRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<Message>;
getUserDataByFid(request: DeepPartial<FidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
/**
* Username Proof
* @http-api: userNameProofByName
*/
/** Username Proof */
getUsernameProof(request: DeepPartial<UsernameProofRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<UserNameProof>;
getUserNameProofsByFid(request: DeepPartial<FidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<UsernameProofsResponse>;
/**
* Verifications
* @http-api: none
*/
/** Verifications */
getVerification(request: DeepPartial<VerificationRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<Message>;
getVerificationsByFid(request: DeepPartial<FidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
/**
* OnChain Events
* @http-api: none
*/
/** OnChain Events */
getOnChainSigner(request: DeepPartial<SignerRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<OnChainEvent>;
getOnChainSignersByFid(request: DeepPartial<FidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<OnChainEventResponse>;
/** @http-api: none */
getOnChainEvents(request: DeepPartial<OnChainEventRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<OnChainEventResponse>;
/** @http-api: none */
getIdRegistryOnChainEvent(request: DeepPartial<FidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<OnChainEvent>;
/** @http-api: onChainIdRegistryEventByAddress */
getIdRegistryOnChainEventByAddress(
request: DeepPartial<IdRegistryEventByAddressRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<OnChainEvent>;
/** @http-api: storageLimitsByFid */
getCurrentStorageLimitsByFid(
request: DeepPartial<FidRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<StorageLimitsResponse>;
getFids(request: DeepPartial<FidsRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<FidsResponse>;
/**
* Links
* @http-api: linkById
*/
getFidAddressType(
request: DeepPartial<FidAddressTypeRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<FidAddressTypeResponse>;
/** Links */
getLink(request: DeepPartial<LinkRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<Message>;
getLinksByFid(request: DeepPartial<LinksByFidRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
/** @http-api: linksByTargetFid */
getLinksByTarget(request: DeepPartial<LinksByTargetRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
/**
* Bulk Methods
* The Bulk methods don't have corresponding HTTP API endpoints because the
* regular endpoints can be used to get all the messages
* @http-api: none
*/
getAllCastMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
/** @http-api: none */
getAllReactionMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
/** @http-api: none */
getAllVerificationMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
/** @http-api: none */
getAllUserDataMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
/** @http-api: none */
getAllLinkMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
/** @http-api: none */
getLinkCompactStateMessageByFid(
request: DeepPartial<FidRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
/** @http-api: none */
submitBulkMessages(
request: DeepPartial<SubmitBulkMessagesRequest>,
/** Bulk Methods */
getAllCastMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<SubmitBulkMessagesResponse>;
/** Sync Methods */
getInfo(request: DeepPartial<HubInfoRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<HubInfoResponse>;
getCurrentPeers(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<ContactInfoResponse>;
/** @http-api: none */
stopSync(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncStatusResponse>;
/**
* This is experimental, do not rely on this endpoint existing in the future
* @http-api: none
*/
forceSync(request: DeepPartial<SyncStatusRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncStatusResponse>;
/** @http-api: none */
getSyncStatus(request: DeepPartial<SyncStatusRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncStatusResponse>;
/** @http-api: none */
getAllSyncIdsByPrefix(request: DeepPartial<TrieNodePrefix>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncIds>;
/** @http-api: none */
getAllMessagesBySyncIds(request: DeepPartial<SyncIds>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse>;
/** @http-api: none */
getSyncMetadataByPrefix(
request: DeepPartial<TrieNodePrefix>,
): Promise<MessagesResponse>;
getAllReactionMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
getAllVerificationMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
getAllUserDataMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
getAllLinkMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse>;
getTrieMetadataByPrefix(
request: DeepPartial<TrieNodeMetadataRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<TrieNodeMetadataResponse>;
/** @http-api: none */
getSyncSnapshotByPrefix(
request: DeepPartial<TrieNodePrefix>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<TrieNodeSnapshotResponse>;
/** @http-api: none */
streamSync(
request: Observable<DeepPartial<StreamSyncRequest>>,
metadata?: grpcWeb.grpc.Metadata,
): Observable<StreamSyncResponse>;
/** @http-api: none */
streamFetch(
request: Observable<DeepPartial<StreamFetchRequest>>,
metadata?: grpcWeb.grpc.Metadata,
): Observable<StreamFetchResponse>;
}
export class HubServiceClientImpl implements HubService {
@@ -217,8 +145,13 @@ export class HubServiceClientImpl implements HubService {
this.rpc = rpc;
this.submitMessage = this.submitMessage.bind(this);
this.validateMessage = this.validateMessage.bind(this);
this.getBlocks = this.getBlocks.bind(this);
this.getShardChunks = this.getShardChunks.bind(this);
this.getInfo = this.getInfo.bind(this);
this.getFids = this.getFids.bind(this);
this.subscribe = this.subscribe.bind(this);
this.getEvent = this.getEvent.bind(this);
this.getEvents = this.getEvents.bind(this);
this.getCast = this.getCast.bind(this);
this.getCastsByFid = this.getCastsByFid.bind(this);
this.getCastsByParent = this.getCastsByParent.bind(this);
@@ -239,28 +172,17 @@ export class HubServiceClientImpl implements HubService {
this.getIdRegistryOnChainEvent = this.getIdRegistryOnChainEvent.bind(this);
this.getIdRegistryOnChainEventByAddress = this.getIdRegistryOnChainEventByAddress.bind(this);
this.getCurrentStorageLimitsByFid = this.getCurrentStorageLimitsByFid.bind(this);
this.getFids = this.getFids.bind(this);
this.getFidAddressType = this.getFidAddressType.bind(this);
this.getLink = this.getLink.bind(this);
this.getLinksByFid = this.getLinksByFid.bind(this);
this.getLinksByTarget = this.getLinksByTarget.bind(this);
this.getLinkCompactStateMessageByFid = this.getLinkCompactStateMessageByFid.bind(this);
this.getAllCastMessagesByFid = this.getAllCastMessagesByFid.bind(this);
this.getAllReactionMessagesByFid = this.getAllReactionMessagesByFid.bind(this);
this.getAllVerificationMessagesByFid = this.getAllVerificationMessagesByFid.bind(this);
this.getAllUserDataMessagesByFid = this.getAllUserDataMessagesByFid.bind(this);
this.getAllLinkMessagesByFid = this.getAllLinkMessagesByFid.bind(this);
this.getLinkCompactStateMessageByFid = this.getLinkCompactStateMessageByFid.bind(this);
this.submitBulkMessages = this.submitBulkMessages.bind(this);
this.getInfo = this.getInfo.bind(this);
this.getCurrentPeers = this.getCurrentPeers.bind(this);
this.stopSync = this.stopSync.bind(this);
this.forceSync = this.forceSync.bind(this);
this.getSyncStatus = this.getSyncStatus.bind(this);
this.getAllSyncIdsByPrefix = this.getAllSyncIdsByPrefix.bind(this);
this.getAllMessagesBySyncIds = this.getAllMessagesBySyncIds.bind(this);
this.getSyncMetadataByPrefix = this.getSyncMetadataByPrefix.bind(this);
this.getSyncSnapshotByPrefix = this.getSyncSnapshotByPrefix.bind(this);
this.streamSync = this.streamSync.bind(this);
this.streamFetch = this.streamFetch.bind(this);
this.getTrieMetadataByPrefix = this.getTrieMetadataByPrefix.bind(this);
}
submitMessage(request: DeepPartial<Message>, metadata?: grpcWeb.grpc.Metadata): Promise<Message> {
@@ -271,6 +193,22 @@ export class HubServiceClientImpl implements HubService {
return this.rpc.unary(HubServiceValidateMessageDesc, Message.fromPartial(request), metadata);
}
getBlocks(request: DeepPartial<BlocksRequest>, metadata?: grpcWeb.grpc.Metadata): Observable<Block> {
return this.rpc.invoke(HubServiceGetBlocksDesc, BlocksRequest.fromPartial(request), metadata);
}
getShardChunks(request: DeepPartial<ShardChunksRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<ShardChunksResponse> {
return this.rpc.unary(HubServiceGetShardChunksDesc, ShardChunksRequest.fromPartial(request), metadata);
}
getInfo(request: DeepPartial<GetInfoRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<GetInfoResponse> {
return this.rpc.unary(HubServiceGetInfoDesc, GetInfoRequest.fromPartial(request), metadata);
}
getFids(request: DeepPartial<FidsRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<FidsResponse> {
return this.rpc.unary(HubServiceGetFidsDesc, FidsRequest.fromPartial(request), metadata);
}
subscribe(request: DeepPartial<SubscribeRequest>, metadata?: grpcWeb.grpc.Metadata): Observable<HubEvent> {
return this.rpc.invoke(HubServiceSubscribeDesc, SubscribeRequest.fromPartial(request), metadata);
}
@@ -279,6 +217,10 @@ export class HubServiceClientImpl implements HubService {
return this.rpc.unary(HubServiceGetEventDesc, EventRequest.fromPartial(request), metadata);
}
getEvents(request: DeepPartial<EventsRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<EventsResponse> {
return this.rpc.unary(HubServiceGetEventsDesc, EventsRequest.fromPartial(request), metadata);
}
getCast(request: DeepPartial<CastId>, metadata?: grpcWeb.grpc.Metadata): Promise<Message> {
return this.rpc.unary(HubServiceGetCastDesc, CastId.fromPartial(request), metadata);
}
@@ -375,8 +317,11 @@ export class HubServiceClientImpl implements HubService {
return this.rpc.unary(HubServiceGetCurrentStorageLimitsByFidDesc, FidRequest.fromPartial(request), metadata);
}
getFids(request: DeepPartial<FidsRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<FidsResponse> {
return this.rpc.unary(HubServiceGetFidsDesc, FidsRequest.fromPartial(request), metadata);
getFidAddressType(
request: DeepPartial<FidAddressTypeRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<FidAddressTypeResponse> {
return this.rpc.unary(HubServiceGetFidAddressTypeDesc, FidAddressTypeRequest.fromPartial(request), metadata);
}
getLink(request: DeepPartial<LinkRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<Message> {
@@ -391,6 +336,13 @@ export class HubServiceClientImpl implements HubService {
return this.rpc.unary(HubServiceGetLinksByTargetDesc, LinksByTargetRequest.fromPartial(request), metadata);
}
getLinkCompactStateMessageByFid(
request: DeepPartial<FidRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse> {
return this.rpc.unary(HubServiceGetLinkCompactStateMessageByFidDesc, FidRequest.fromPartial(request), metadata);
}
getAllCastMessagesByFid(
request: DeepPartial<FidTimestampRequest>,
metadata?: grpcWeb.grpc.Metadata,
@@ -438,74 +390,15 @@ export class HubServiceClientImpl implements HubService {
return this.rpc.unary(HubServiceGetAllLinkMessagesByFidDesc, FidTimestampRequest.fromPartial(request), metadata);
}
getLinkCompactStateMessageByFid(
request: DeepPartial<FidRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<MessagesResponse> {
return this.rpc.unary(HubServiceGetLinkCompactStateMessageByFidDesc, FidRequest.fromPartial(request), metadata);
}
submitBulkMessages(
request: DeepPartial<SubmitBulkMessagesRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<SubmitBulkMessagesResponse> {
return this.rpc.unary(HubServiceSubmitBulkMessagesDesc, SubmitBulkMessagesRequest.fromPartial(request), metadata);
}
getInfo(request: DeepPartial<HubInfoRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<HubInfoResponse> {
return this.rpc.unary(HubServiceGetInfoDesc, HubInfoRequest.fromPartial(request), metadata);
}
getCurrentPeers(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<ContactInfoResponse> {
return this.rpc.unary(HubServiceGetCurrentPeersDesc, Empty.fromPartial(request), metadata);
}
stopSync(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncStatusResponse> {
return this.rpc.unary(HubServiceStopSyncDesc, Empty.fromPartial(request), metadata);
}
forceSync(request: DeepPartial<SyncStatusRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncStatusResponse> {
return this.rpc.unary(HubServiceForceSyncDesc, SyncStatusRequest.fromPartial(request), metadata);
}
getSyncStatus(request: DeepPartial<SyncStatusRequest>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncStatusResponse> {
return this.rpc.unary(HubServiceGetSyncStatusDesc, SyncStatusRequest.fromPartial(request), metadata);
}
getAllSyncIdsByPrefix(request: DeepPartial<TrieNodePrefix>, metadata?: grpcWeb.grpc.Metadata): Promise<SyncIds> {
return this.rpc.unary(HubServiceGetAllSyncIdsByPrefixDesc, TrieNodePrefix.fromPartial(request), metadata);
}
getAllMessagesBySyncIds(request: DeepPartial<SyncIds>, metadata?: grpcWeb.grpc.Metadata): Promise<MessagesResponse> {
return this.rpc.unary(HubServiceGetAllMessagesBySyncIdsDesc, SyncIds.fromPartial(request), metadata);
}
getSyncMetadataByPrefix(
request: DeepPartial<TrieNodePrefix>,
getTrieMetadataByPrefix(
request: DeepPartial<TrieNodeMetadataRequest>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<TrieNodeMetadataResponse> {
return this.rpc.unary(HubServiceGetSyncMetadataByPrefixDesc, TrieNodePrefix.fromPartial(request), metadata);
}
getSyncSnapshotByPrefix(
request: DeepPartial<TrieNodePrefix>,
metadata?: grpcWeb.grpc.Metadata,
): Promise<TrieNodeSnapshotResponse> {
return this.rpc.unary(HubServiceGetSyncSnapshotByPrefixDesc, TrieNodePrefix.fromPartial(request), metadata);
}
streamSync(
request: Observable<DeepPartial<StreamSyncRequest>>,
metadata?: grpcWeb.grpc.Metadata,
): Observable<StreamSyncResponse> {
throw new Error("ts-proto does not yet support client streaming!");
}
streamFetch(
request: Observable<DeepPartial<StreamFetchRequest>>,
metadata?: grpcWeb.grpc.Metadata,
): Observable<StreamFetchResponse> {
throw new Error("ts-proto does not yet support client streaming!");
return this.rpc.unary(
HubServiceGetTrieMetadataByPrefixDesc,
TrieNodeMetadataRequest.fromPartial(request),
metadata,
);
}
}
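
The hub-web client surface now matches snapchain's `HubService`: `getInfo` takes a `GetInfoRequest`, `getEvents` and the streaming `getBlocks` are exposed, and the hub-only sync/bulk methods are gone. A hedged wiring sketch; the endpoint is illustrative and the request objects are left empty, since every request type is a `DeepPartial`:

```typescript
import { GrpcWebImpl, HubServiceClientImpl } from "./generated/rpc"; // illustrative path

async function queryHub(): Promise<void> {
  const rpc = new GrpcWebImpl("https://hub.example.com:2285", {});
  const client = new HubServiceClientImpl(rpc);

  // getInfo now takes a GetInfoRequest instead of HubInfoRequest.
  const info = await client.getInfo({});
  console.log(info);

  // getEvents is a new unary call for paging historical events.
  const events = await client.getEvents({});
  console.log(events);

  // getBlocks is server-streaming and surfaces as an rxjs Observable.
  const subscription = client.getBlocks({}).subscribe((block) => {
    // handle each Block as it arrives
  });
  subscription.unsubscribe();
}
```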
@@ -557,6 +450,98 @@ export const HubServiceValidateMessageDesc: UnaryMethodDefinitionish = {
} as any,
};
export const HubServiceGetBlocksDesc: UnaryMethodDefinitionish = {
methodName: "GetBlocks",
service: HubServiceDesc,
requestStream: false,
responseStream: true,
requestType: {
serializeBinary() {
return BlocksRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = Block.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetShardChunksDesc: UnaryMethodDefinitionish = {
methodName: "GetShardChunks",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return ShardChunksRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = ShardChunksResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetInfoDesc: UnaryMethodDefinitionish = {
methodName: "GetInfo",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return GetInfoRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = GetInfoResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetFidsDesc: UnaryMethodDefinitionish = {
methodName: "GetFids",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return FidsRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = FidsResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceSubscribeDesc: UnaryMethodDefinitionish = {
methodName: "Subscribe",
service: HubServiceDesc,
@@ -603,6 +588,29 @@ export const HubServiceGetEventDesc: UnaryMethodDefinitionish = {
} as any,
};
export const HubServiceGetEventsDesc: UnaryMethodDefinitionish = {
methodName: "GetEvents",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return EventsRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = EventsResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetCastDesc: UnaryMethodDefinitionish = {
methodName: "GetCast",
service: HubServiceDesc,
@@ -1063,19 +1071,19 @@ export const HubServiceGetCurrentStorageLimitsByFidDesc: UnaryMethodDefinitionis
} as any,
};
export const HubServiceGetFidAddressTypeDesc: UnaryMethodDefinitionish = {
methodName: "GetFidAddressType",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return FidAddressTypeRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = FidAddressTypeResponse.decode(data);
return {
...value,
toObject() {
@@ -1155,6 +1163,29 @@ export const HubServiceGetLinksByTargetDesc: UnaryMethodDefinitionish = {
} as any,
};
export const HubServiceGetLinkCompactStateMessageByFidDesc: UnaryMethodDefinitionish = {
methodName: "GetLinkCompactStateMessageByFid",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return FidRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = MessagesResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetAllCastMessagesByFidDesc: UnaryMethodDefinitionish = {
methodName: "GetAllCastMessagesByFid",
service: HubServiceDesc,
@@ -1270,221 +1301,14 @@ export const HubServiceGetAllLinkMessagesByFidDesc: UnaryMethodDefinitionish = {
} as any,
};
export const HubServiceGetTrieMetadataByPrefixDesc: UnaryMethodDefinitionish = {
methodName: "GetTrieMetadataByPrefix",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return TrieNodeMetadataRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = TrieNodeMetadataResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceSubmitBulkMessagesDesc: UnaryMethodDefinitionish = {
methodName: "SubmitBulkMessages",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return SubmitBulkMessagesRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = SubmitBulkMessagesResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetInfoDesc: UnaryMethodDefinitionish = {
methodName: "GetInfo",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return HubInfoRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = HubInfoResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetCurrentPeersDesc: UnaryMethodDefinitionish = {
methodName: "GetCurrentPeers",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return Empty.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = ContactInfoResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceStopSyncDesc: UnaryMethodDefinitionish = {
methodName: "StopSync",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return Empty.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = SyncStatusResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceForceSyncDesc: UnaryMethodDefinitionish = {
methodName: "ForceSync",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return SyncStatusRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = SyncStatusResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetSyncStatusDesc: UnaryMethodDefinitionish = {
methodName: "GetSyncStatus",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return SyncStatusRequest.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = SyncStatusResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetAllSyncIdsByPrefixDesc: UnaryMethodDefinitionish = {
methodName: "GetAllSyncIdsByPrefix",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return TrieNodePrefix.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = SyncIds.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetAllMessagesBySyncIdsDesc: UnaryMethodDefinitionish = {
methodName: "GetAllMessagesBySyncIds",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return SyncIds.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = MessagesResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const HubServiceGetSyncMetadataByPrefixDesc: UnaryMethodDefinitionish = {
methodName: "GetSyncMetadataByPrefix",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return TrieNodePrefix.encode(this).finish();
},
} as any,
responseType: {
@@ -1500,129 +1324,6 @@ export const HubServiceGetSyncMetadataByPrefixDesc: UnaryMethodDefinitionish = {
} as any,
};
export const HubServiceGetSyncSnapshotByPrefixDesc: UnaryMethodDefinitionish = {
methodName: "GetSyncSnapshotByPrefix",
service: HubServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return TrieNodePrefix.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = TrieNodeSnapshotResponse.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export interface AdminService {
rebuildSyncTrie(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<Empty>;
deleteAllMessagesFromDb(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<Empty>;
submitOnChainEvent(request: DeepPartial<OnChainEvent>, metadata?: grpcWeb.grpc.Metadata): Promise<OnChainEvent>;
}
export class AdminServiceClientImpl implements AdminService {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.rebuildSyncTrie = this.rebuildSyncTrie.bind(this);
this.deleteAllMessagesFromDb = this.deleteAllMessagesFromDb.bind(this);
this.submitOnChainEvent = this.submitOnChainEvent.bind(this);
}
rebuildSyncTrie(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<Empty> {
return this.rpc.unary(AdminServiceRebuildSyncTrieDesc, Empty.fromPartial(request), metadata);
}
deleteAllMessagesFromDb(request: DeepPartial<Empty>, metadata?: grpcWeb.grpc.Metadata): Promise<Empty> {
return this.rpc.unary(AdminServiceDeleteAllMessagesFromDbDesc, Empty.fromPartial(request), metadata);
}
submitOnChainEvent(request: DeepPartial<OnChainEvent>, metadata?: grpcWeb.grpc.Metadata): Promise<OnChainEvent> {
return this.rpc.unary(AdminServiceSubmitOnChainEventDesc, OnChainEvent.fromPartial(request), metadata);
}
}
export const AdminServiceDesc = { serviceName: "AdminService" };
export const AdminServiceRebuildSyncTrieDesc: UnaryMethodDefinitionish = {
methodName: "RebuildSyncTrie",
service: AdminServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return Empty.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = Empty.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const AdminServiceDeleteAllMessagesFromDbDesc: UnaryMethodDefinitionish = {
methodName: "DeleteAllMessagesFromDb",
service: AdminServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return Empty.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = Empty.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
export const AdminServiceSubmitOnChainEventDesc: UnaryMethodDefinitionish = {
methodName: "SubmitOnChainEvent",
service: AdminServiceDesc,
requestStream: false,
responseStream: false,
requestType: {
serializeBinary() {
return OnChainEvent.encode(this).finish();
},
} as any,
responseType: {
deserializeBinary(data: Uint8Array) {
const value = OnChainEvent.decode(data);
return {
...value,
toObject() {
return value;
},
};
},
} as any,
};
interface UnaryMethodDefinitionishR extends grpcWeb.grpc.UnaryMethodDefinition<any, any> {
requestStream: any;
responseStream: any;

@@ -1,5 +1,4 @@
import {
ClientDuplexStream,
FidTimestampRequest,
fromFarcasterTime,
HubError,
@@ -9,8 +8,6 @@ import {
Message,
MessagesResponse,
MessageType,
StreamFetchRequest,
StreamFetchResponse,
} from "@farcaster/hub-nodejs";
import { DB, MessageRow, sql } from "./db";
import { pino } from "pino";
@@ -32,35 +29,15 @@ type DBMessage = {
// Ensures that all messages for a given FID are present in the database. Can be used for both backfilling and reconciliation.
export class MessageReconciliation {
private client: HubRpcClient;
private stream: ClientDuplexStream<StreamFetchRequest, StreamFetchResponse> | undefined;
private db: DB;
private log: pino.Logger;
private connectionTimeout: number; // milliseconds
constructor(client: HubRpcClient, db: DB, log: pino.Logger, connectionTimeout = 30000) {
this.client = client;
this.db = db;
this.log = log;
this.connectionTimeout = connectionTimeout;
}
async establishStream() {
const maybeStream = await this.client.streamFetch();
if (maybeStream.isOk()) {
this.stream = maybeStream.value;
} else {
this.log.warn(maybeStream.error, "could not establish stream");
}
}
async close() {
if (this.stream) {
this.stream.cancel();
this.stream = undefined;
}
}
async reconcileMessagesForFid(
@@ -179,88 +156,24 @@ export class MessageReconciliation {
}
}
private async doCallWithFailover(
request: Partial<StreamFetchRequest>,
fallback: () => Promise<HubResult<MessagesResponse>>,
) {
const id = randomUUID();
const result = new Promise<HubResult<MessagesResponse>>((resolve) => {
if (!this.stream) {
fallback().then((result) => resolve(result));
return;
}
const process = async (response: StreamFetchResponse) => {
// Do not allow hanging unresponsive connections to linger:
const cancel = setTimeout(() => {
this.log.warn("Stream fetch timed out, falling back to RPC");
this.stream?.cancel();
this.stream = undefined;
fallback().then((result) => resolve(result));
}, this.connectionTimeout);
if (!this.stream) {
clearTimeout(cancel);
this.log.warn("Stream unavailable, falling back to RPC");
fallback().then((result) => resolve(result));
return;
}
this.stream.off("data", process);
if (response.idempotencyKey !== id || !response.messages) {
if (response?.error) {
clearTimeout(cancel);
resolve(err(new HubError(response.error.errCode as HubErrorCode, { message: response.error.message })));
return;
}
this.stream.cancel();
this.stream = undefined;
fallback()
.then((result) => resolve(result))
.finally(() => clearTimeout(cancel));
} else {
clearTimeout(cancel);
resolve(ok(response.messages));
}
};
this.stream.on("data", process);
});
this.stream?.write({
...request,
idempotencyKey: id,
});
return await result;
}
private async getAllCastMessagesByFid(request: FidTimestampRequest) {
return await this.client.getAllCastMessagesByFid(request);
}
private async getAllReactionMessagesByFid(request: FidTimestampRequest) {
return await this.client.getAllReactionMessagesByFid(request);
}
private async getAllLinkMessagesByFid(request: FidTimestampRequest) {
return await this.client.getAllLinkMessagesByFid(request);
}
private async getAllVerificationMessagesByFid(request: FidTimestampRequest) {
return await this.client.getAllVerificationMessagesByFid(request);
}
private async getAllUserDataMessagesByFid(request: FidTimestampRequest) {
return await this.client.getAllUserDataMessagesByFid(request);
}
private async *getAllCastMessagesByFidInBatchesOf(
@@ -331,20 +244,18 @@ export class MessageReconciliation {
result = await this.getAllLinkMessagesByFid({ pageSize, pageToken, fid, startTimestamp, stopTimestamp });
}
let deltaResult = await this.client.getLinkCompactStateMessageByFid({ fid, pageSize });
for (;;) {
if (deltaResult.isErr()) {
throw new Error(`Unable to get all link compact results for FID ${fid}: ${deltaResult.error?.message}`);
}
const { messages, nextPageToken: pageToken } = deltaResult.value;
yield messages;
if (!pageToken?.length) break;
deltaResult = await this.client.getLinkCompactStateMessageByFid({ pageSize, pageToken, fid });
}
}
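// A minimal sketch of driving the reconciler after this refactor. It assumes the file's
// existing imports (DB, pino) are in scope, that getInsecureHubRpcClient is available from
// @farcaster/hub-nodejs, and that reconcileMessagesForFid accepts (fid, onHubMessage,
// onDbMessage) callbacks; the gRPC address is a placeholder and the callback shapes are
// abridged, not confirmed by this hunk.
import { getInsecureHubRpcClient } from "@farcaster/hub-nodejs";

async function reconcileFid(db: DB, fid: number) {
  const client = getInsecureHubRpcClient("localhost:3383"); // placeholder address
  const log = pino();
  // The constructor no longer takes a useStreamingRpcs flag; only the timeout remains.
  const reconciler = new MessageReconciliation(client, db, log, 30_000);
  await reconciler.reconcileMessagesForFid(
    fid,
    async (message, missingInDb) => {
      if (missingInDb) log.info({ fid, hash: message.hash }, "hub message missing from the database");
    },
    async (dbMessage, missingInHub) => {
      if (missingInHub) log.info({ fid }, "database message missing from the hub");
    },
  );
}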

@@ -1,28 +0,0 @@
# @farcaster/protobufs
Specifications for APIs and data formats used in Hubble, including both Farcaster protocol types and Hubble-specific types.
| Schema | Type Description | Docs |
|---------------------------------------------|------------------------------------------| ----------------------- |
| [Message](schemas/message.proto) | Types for Farcaster deltas | [docs](../apps/hubble/www/docs/docs/messages.md) |
| [OnChainEvent](schemas/onchain_event.proto) | Types for Farcaster onchain events | [docs](../apps/hubble/www/docs/docs/onchain_events.md) |
| [HubEvent](schemas/hub_event.proto) | Types for hub events | [docs](../apps/hubble/www/docs/docs/events.md) |
| [RPC](schemas/rpc.proto) | Types for gRPC APIs exposed by Hubs | [docs](../apps/hubble/www/docs/docs/api.md) |
| [Gossip](schemas/gossip.proto) | Types for gossiping data between Hubs | |
| [HubState](schemas/hub_state.proto) | Types for maintaining internal state | |
## Getting Started
### Compiling Protobufs
If you make changes to the protobufs, you will need to run `yarn protoc` in the following directories to compile and generate the JS files:
- `packages/core`
- `packages/hub-nodejs`
- `packages/hub-web`
### Generate Bindings
Coming soon
### Docs
Docs for the protobufs are under `../apps/hubble/www/docs/docs`

@@ -1,80 +0,0 @@
syntax = "proto3";
import "message.proto";
enum GossipVersion {
GOSSIP_VERSION_V1 = 0;
GOSSIP_VERSION_V1_1 = 1;
}
message GossipAddressInfo {
string address = 1;
uint32 family = 2;
uint32 port = 3;
string dns_name = 4;
}
message ContactInfoContentBody {
GossipAddressInfo gossip_address = 1;
GossipAddressInfo rpc_address = 2;
repeated string excluded_hashes = 3;
uint32 count = 4;
string hub_version = 5;
FarcasterNetwork network = 6;
string app_version = 7;
uint64 timestamp = 8;
}
message ContactInfoContent {
GossipAddressInfo gossip_address = 1;
GossipAddressInfo rpc_address = 2;
repeated string excluded_hashes = 3;
uint32 count = 4;
string hub_version = 5;
FarcasterNetwork network = 6;
string app_version = 7;
uint64 timestamp = 8;
ContactInfoContentBody body = 9;
bytes signature = 10; // Signature of the message digest
bytes signer = 11; // Public key of the peer that originated the contact info
optional bytes data_bytes = 12; // Optional alternative serialization used for signing
}
message PingMessageBody {
bytes ping_origin_peer_id = 1;
uint64 ping_timestamp = 2;
}
message AckMessageBody {
bytes ping_origin_peer_id = 1;
bytes ack_origin_peer_id = 2;
uint64 ping_timestamp = 3;
uint64 ack_timestamp = 4;
}
message NetworkLatencyMessage {
oneof body {
PingMessageBody ping_message = 2;
AckMessageBody ack_message = 3;
}
}
message MessageBundle {
bytes hash = 1;
repeated Message messages = 2;
}
message GossipMessage {
oneof content {
Message message = 1;
// Deprecated
// IdRegistryEvent id_registry_event = 2;
ContactInfoContent contact_info_content = 3;
NetworkLatencyMessage network_latency_message = 7;
MessageBundle message_bundle = 9;
}
repeated string topics = 4;
bytes peer_id = 5;
GossipVersion version = 6;
uint32 timestamp = 8; // Farcaster epoch timestamp in seconds when this message was first created
}

@@ -1,73 +0,0 @@
syntax = "proto3";
import "message.proto";
import "onchain_event.proto";
import "username_proof.proto";
enum HubEventType {
HUB_EVENT_TYPE_NONE = 0;
HUB_EVENT_TYPE_MERGE_MESSAGE = 1;
HUB_EVENT_TYPE_PRUNE_MESSAGE = 2;
HUB_EVENT_TYPE_REVOKE_MESSAGE = 3;
// Deprecated
// HUB_EVENT_TYPE_MERGE_ID_REGISTRY_EVENT = 4;
// HUB_EVENT_TYPE_MERGE_NAME_REGISTRY_EVENT = 5;
HUB_EVENT_TYPE_MERGE_USERNAME_PROOF = 6;
// Deprecated
// HUB_EVENT_TYPE_MERGE_RENT_REGISTRY_EVENT = 7;
// HUB_EVENT_TYPE_MERGE_STORAGE_ADMIN_REGISTRY_EVENT = 8;
HUB_EVENT_TYPE_MERGE_ON_CHAIN_EVENT = 9;
HUB_EVENT_TYPE_MERGE_FAILURE = 10;
}
message MergeMessageBody {
Message message = 1;
repeated Message deleted_messages = 2;
}
message PruneMessageBody {
Message message = 1;
}
message MergeFailureBody {
Message message = 1;
string code = 2;
string reason = 3;
}
message RevokeMessageBody {
Message message = 1;
}
message MergeOnChainEventBody {
OnChainEvent on_chain_event = 1;
}
message MergeUserNameProofBody {
UserNameProof username_proof = 1;
UserNameProof deleted_username_proof = 2;
Message username_proof_message = 3;
Message deleted_username_proof_message = 4;
}
message HubEvent {
HubEventType type = 1;
uint64 id = 2;
oneof body {
MergeMessageBody merge_message_body = 3;
PruneMessageBody prune_message_body = 4;
RevokeMessageBody revoke_message_body = 5;
// Deprecated
// MergeIdRegistryEventBody merge_id_registry_event_body = 6;
// MergeNameRegistryEventBody merge_name_registry_event_body = 7;
MergeUserNameProofBody merge_username_proof_body = 8;
// Deprecated
// MergeRentRegistryEventBody merge_rent_registry_event_body = 9;
// MergeStorageAdminRegistryEventBody merge_storage_admin_registry_event_body = 10;
MergeOnChainEventBody merge_on_chain_event_body = 11;
MergeFailureBody merge_failure = 13;
};
uint64 block_number = 12;
uint32 shard_index = 14;
uint64 timestamp = 15;
}

@@ -1,14 +0,0 @@
syntax = "proto3";
message ValidateOrRevokeJobState {
uint32 last_job_timestamp = 1; // The (Farcaster time epoch) timestamp where the last job started
uint32 last_fid = 2; // The last FID to complete successfully. If this is 0, then the last job finished successfully
}
message HubState {
// uint32 last_eth_block = 1; // Deprecated
uint64 last_fname_proof = 2;
uint64 last_l2_block = 3;
// bool syncEvents = 4; // Deprecated
ValidateOrRevokeJobState validate_or_revoke_state = 5;
}

@@ -1,10 +0,0 @@
syntax = "proto3";
message RevokeMessagesBySignerJobPayload {
uint32 fid = 1;
bytes signer = 2;
}
message UpdateNameRegistryEventExpiryJobPayload {
bytes fname = 1;
}

@@ -1,210 +0,0 @@
syntax = "proto3";
import "username_proof.proto";
/**
* A Message is a delta operation on the Farcaster network. The message protobuf is an envelope
* that wraps a MessageData object and contains a hash and signature which can verify its authenticity.
*/
message Message {
MessageData data = 1; // Contents of the message
bytes hash = 2; // Hash digest of data
HashScheme hash_scheme = 3; // Hash scheme that produced the hash digest
bytes signature = 4; // Signature of the hash digest
SignatureScheme signature_scheme = 5; // Signature scheme that produced the signature
bytes signer = 6; // Public key or address of the key pair that produced the signature
optional bytes data_bytes = 7; // MessageData serialized to bytes if using protobuf serialization other than ts-proto
}
/**
* A MessageData object contains properties common to all messages and wraps a body object which
* contains properties specific to the MessageType.
*/
message MessageData {
MessageType type = 1; // Type of message contained in the body
uint64 fid = 2; // Farcaster ID of the user producing the message
uint32 timestamp = 3; // Farcaster epoch timestamp in seconds
FarcasterNetwork network = 4; // Farcaster network the message is intended for
oneof body {
CastAddBody cast_add_body = 5;
CastRemoveBody cast_remove_body = 6;
ReactionBody reaction_body = 7;
VerificationAddAddressBody verification_add_address_body = 9;
VerificationRemoveBody verification_remove_body = 10;
// SignerAddBody signer_add_body = 11; // Deprecated
UserDataBody user_data_body = 12;
// SignerRemoveBody signer_remove_body = 13; // Deprecated
LinkBody link_body = 14;
UserNameProof username_proof_body = 15;
FrameActionBody frame_action_body = 16;
// Compaction messages
LinkCompactStateBody link_compact_state_body = 17;
} // Properties specific to the MessageType
}
/** Type of hashing scheme used to produce a digest of MessageData */
enum HashScheme {
HASH_SCHEME_NONE = 0;
HASH_SCHEME_BLAKE3 = 1; // Default scheme for hashing MessageData
}
/** Type of signature scheme used to sign the Message hash */
enum SignatureScheme {
SIGNATURE_SCHEME_NONE = 0;
SIGNATURE_SCHEME_ED25519 = 1; // Ed25519 signature (default)
SIGNATURE_SCHEME_EIP712 = 2; // ECDSA signature using EIP-712 scheme
}
/** Type of the MessageBody */
enum MessageType {
MESSAGE_TYPE_NONE = 0;
MESSAGE_TYPE_CAST_ADD = 1; // Add a new Cast
MESSAGE_TYPE_CAST_REMOVE = 2; // Remove an existing Cast
MESSAGE_TYPE_REACTION_ADD = 3; // Add a Reaction to a Cast
MESSAGE_TYPE_REACTION_REMOVE = 4; // Remove a Reaction from a Cast
MESSAGE_TYPE_LINK_ADD = 5; // Add a new Link
MESSAGE_TYPE_LINK_REMOVE = 6; // Remove an existing Link
MESSAGE_TYPE_VERIFICATION_ADD_ETH_ADDRESS = 7; // Add a Verification of an Ethereum Address
MESSAGE_TYPE_VERIFICATION_REMOVE = 8; // Remove a Verification
// Deprecated
// MESSAGE_TYPE_SIGNER_ADD = 9; // Add a new Ed25519 key pair that signs messages for a user
// MESSAGE_TYPE_SIGNER_REMOVE = 10; // Remove an Ed25519 key pair that signs messages for a user
MESSAGE_TYPE_USER_DATA_ADD = 11; // Add metadata about a user
MESSAGE_TYPE_USERNAME_PROOF = 12; // Add or replace a username proof
MESSAGE_TYPE_FRAME_ACTION = 13; // A Farcaster Frame action
MESSAGE_TYPE_LINK_COMPACT_STATE = 14; // Link Compaction State Message
}
/** Farcaster network the message is intended for */
enum FarcasterNetwork {
FARCASTER_NETWORK_NONE = 0;
FARCASTER_NETWORK_MAINNET = 1; // Public primary network
FARCASTER_NETWORK_TESTNET = 2; // Public test network
FARCASTER_NETWORK_DEVNET = 3; // Private test network
}
/** Adds metadata about a user */
message UserDataBody {
UserDataType type = 1; // Type of metadata
string value = 2; // Value of the metadata
}
/** Type of UserData */
enum UserDataType {
USER_DATA_TYPE_NONE = 0;
USER_DATA_TYPE_PFP = 1; // Profile Picture for the user
USER_DATA_TYPE_DISPLAY = 2; // Display Name for the user
USER_DATA_TYPE_BIO = 3; // Bio for the user
USER_DATA_TYPE_URL = 5; // URL of the user
USER_DATA_TYPE_USERNAME = 6; // Preferred Name for the user
USER_DATA_TYPE_LOCATION = 7; // Current location for the user
USER_DATA_TYPE_TWITTER = 8; // Username of user on x
USER_DATA_TYPE_GITHUB = 9; // Username of user on github
USER_DATA_TYPE_BANNER = 10; // Banner image for the user
USER_DATA_PRIMARY_ADDRESS_ETHEREUM = 11; // Primary address for the user on Ethereum
USER_DATA_PRIMARY_ADDRESS_SOLANA = 12; // Primary address for the user on Solana
}
message Embed {
oneof embed {
string url = 1;
CastId cast_id = 2;
}
}
/** Type of cast */
enum CastType {
CAST = 0;
LONG_CAST = 1;
TEN_K_CAST = 2;
}
/** Adds a new Cast */
message CastAddBody {
repeated string embeds_deprecated = 1; // URLs to be embedded in the cast
repeated uint64 mentions = 2; // Fids mentioned in the cast
oneof parent {
CastId parent_cast_id = 3; // Parent cast of the cast
string parent_url = 7; // Parent URL
};
string text = 4; // Text of the cast
repeated uint32 mentions_positions = 5; // Positions of the mentions in the text
repeated Embed embeds = 6; // URLs or cast ids to be embedded in the cast
CastType type = 8; // Type of cast
}
/** Removes an existing Cast */
message CastRemoveBody {
bytes target_hash = 1; // Hash of the cast to remove
}
/** Identifier used to look up a Cast */
message CastId {
uint64 fid = 1; // Fid of the user who created the cast
bytes hash = 2; // Hash of the cast
}
/** Adds or removes a Reaction from a Cast */
message ReactionBody {
ReactionType type = 1; // Type of reaction
oneof target {
CastId target_cast_id = 2; // CastId of the Cast to react to
string target_url = 3; // URL to react to
}
}
/** Type of Reaction */
enum ReactionType {
REACTION_TYPE_NONE = 0;
REACTION_TYPE_LIKE = 1; // Like the target cast
REACTION_TYPE_RECAST = 2; // Share target cast to the user's audience
}
/** Type of Protocol to disambiguate verification addresses */
enum Protocol {
PROTOCOL_ETHEREUM = 0;
PROTOCOL_SOLANA = 1;
}
/** Adds a Verification of ownership of an Address based on Protocol */
message VerificationAddAddressBody {
bytes address = 1; // Address being verified for a given Protocol
bytes claim_signature = 2; // Signature produced by the user's address for a given Protocol
bytes block_hash = 3; // Hash of the latest Ethereum block when the signature was produced
uint32 verification_type = 4; // Type of verification. 0 = EOA, 1 = contract
uint32 chain_id = 5; // 0 for EOA verifications, 1 or 10 for contract verifications
Protocol protocol = 7; // Protocol of the Verification
}
/** Removes a Verification of a given protocol */
message VerificationRemoveBody {
bytes address = 1; // Address of the Verification to remove
Protocol protocol = 2; // Protocol of the Verification to remove
}
/** Adds or removes a Link */
message LinkBody {
string type = 1; // Type of link, <= 8 characters
optional uint32 displayTimestamp = 2; // User-defined timestamp that preserves original timestamp when message.data.timestamp needs to be updated for compaction
oneof target {
uint64 target_fid = 3; // The fid the link relates to
}
}
/** A Compaction message for the Link Store */
message LinkCompactStateBody {
string type = 1; // Type of link, <= 8 characters
repeated uint64 target_fids = 2;
}
/** A Farcaster Frame action */
message FrameActionBody {
bytes url = 1; // URL of the Frame triggering the action
uint32 button_index = 2; // The index of the button pressed (1-4)
CastId cast_id = 3; // The cast which contained the frame url
bytes input_text = 4; // Text input from the user, if present
bytes state = 5; // Serialized frame state value
bytes transaction_id = 6; // Chain-specific transaction ID for tx actions
bytes address = 7; // Chain-specific address for tx actions
}

@@ -1,80 +0,0 @@
syntax = "proto3";
enum OnChainEventType {
EVENT_TYPE_NONE = 0;
EVENT_TYPE_SIGNER = 1;
EVENT_TYPE_SIGNER_MIGRATED = 2;
EVENT_TYPE_ID_REGISTER = 3;
EVENT_TYPE_STORAGE_RENT = 4;
EVENT_TYPE_TIER_PURCHASE = 5;
}
message OnChainEvent {
OnChainEventType type = 1;
uint32 chain_id = 2;
uint32 block_number = 3;
bytes block_hash = 4;
uint64 block_timestamp = 5;
bytes transaction_hash = 6;
uint32 log_index = 7;
uint64 fid = 8;
oneof body {
SignerEventBody signer_event_body = 9;
SignerMigratedEventBody signer_migrated_event_body = 10;
IdRegisterEventBody id_register_event_body = 11;
StorageRentEventBody storage_rent_event_body = 12;
TierPurchaseBody tier_purchase_event_body = 15;
}
uint32 tx_index = 13;
uint32 version = 14;
}
enum TierType {
None = 0;
Pro = 1;
}
message TierPurchaseBody {
TierType tier_type = 1;
uint64 for_days = 2;
bytes payer = 3;
}
enum SignerEventType {
SIGNER_EVENT_TYPE_NONE = 0;
SIGNER_EVENT_TYPE_ADD = 1;
SIGNER_EVENT_TYPE_REMOVE = 2;
SIGNER_EVENT_TYPE_ADMIN_RESET = 3;
}
message SignerEventBody {
bytes key = 1;
uint32 key_type = 2;
SignerEventType event_type = 3;
bytes metadata = 4;
uint32 metadata_type = 5;
}
message SignerMigratedEventBody {
uint32 migratedAt = 1;
}
enum IdRegisterEventType {
ID_REGISTER_EVENT_TYPE_NONE = 0;
ID_REGISTER_EVENT_TYPE_REGISTER = 1;
ID_REGISTER_EVENT_TYPE_TRANSFER = 2;
ID_REGISTER_EVENT_TYPE_CHANGE_RECOVERY = 3;
}
message IdRegisterEventBody {
bytes to = 1;
IdRegisterEventType event_type = 2;
bytes from = 3;
bytes recovery_address = 4;
}
message StorageRentEventBody {
bytes payer = 1;
uint32 units = 2;
uint32 expiry = 3;
}

@@ -1,359 +0,0 @@
syntax = "proto3";
import "message.proto";
import "onchain_event.proto";
import "hub_event.proto";
import "username_proof.proto";
import "gossip.proto";
message Empty {}
message SubscribeRequest {
repeated HubEventType event_types = 1;
optional uint64 from_id = 2;
optional uint64 total_shards = 3;
optional uint64 shard_index = 4;
}
message EventRequest {
uint64 id = 1;
}
message HubInfoRequest {
bool db_stats = 1;
}
// Response Types for the Sync RPC Methods
message HubInfoResponse {
string version = 1;
bool is_syncing = 2;
string nickname = 3;
string root_hash = 4;
DbStats db_stats = 5;
string peerId = 6;
uint64 hub_operator_fid = 7;
}
message DbStats {
uint64 num_messages = 1;
uint64 num_fid_events = 2;
uint64 num_fname_events = 3;
uint64 approx_size = 4;
}
message SyncStatusRequest {
optional string peerId = 1;
}
message SyncStatusResponse {
bool is_syncing = 1;
repeated SyncStatus sync_status = 2;
bool engine_started = 3;
}
message SyncStatus {
string peerId = 1;
string inSync = 2;
bool shouldSync = 3;
string divergencePrefix = 4;
int32 divergenceSecondsAgo = 5;
uint64 theirMessages = 6;
uint64 ourMessages = 7;
int64 lastBadSync = 8;
int64 score = 9;
}
message TrieNodeMetadataResponse {
bytes prefix = 1;
uint64 num_messages = 2;
string hash = 3;
repeated TrieNodeMetadataResponse children = 4;
}
message TrieNodeSnapshotResponse {
bytes prefix = 1;
repeated string excluded_hashes = 2;
uint64 num_messages = 3;
string root_hash = 4;
}
message TrieNodePrefix {
bytes prefix = 1;
}
message SyncIds {
repeated bytes sync_ids = 1;
}
message FidRequest {
uint64 fid = 1;
optional uint32 page_size = 2;
optional bytes page_token = 3;
optional bool reverse = 4;
}
message FidTimestampRequest {
uint64 fid = 1;
optional uint32 page_size = 2;
optional bytes page_token = 3;
optional bool reverse = 4;
optional uint64 start_timestamp = 5;
optional uint64 stop_timestamp = 6;
}
message FidsRequest {
optional uint32 page_size = 1;
optional bytes page_token = 2;
optional bool reverse = 3;
}
message FidsResponse {
repeated uint64 fids = 1;
optional bytes next_page_token = 2;
}
message MessagesResponse {
repeated Message messages = 1;
optional bytes next_page_token = 2;
}
message CastsByParentRequest {
oneof parent {
CastId parent_cast_id = 1;
string parent_url = 5;
}
optional uint32 page_size = 2;
optional bytes page_token = 3;
optional bool reverse = 4;
}
message ReactionRequest {
uint64 fid = 1;
ReactionType reaction_type = 2;
oneof target {
CastId target_cast_id = 3;
string target_url = 4;
}
}
message ReactionsByFidRequest {
uint64 fid = 1;
optional ReactionType reaction_type = 2;
optional uint32 page_size = 3;
optional bytes page_token = 4;
optional bool reverse = 5;
}
message ReactionsByTargetRequest {
oneof target {
CastId target_cast_id = 1;
string target_url = 6;
}
optional ReactionType reaction_type = 2;
optional uint32 page_size = 3;
optional bytes page_token = 4;
optional bool reverse = 5;
}
message UserDataRequest {
uint64 fid = 1;
UserDataType user_data_type = 2;
}
message NameRegistryEventRequest {
bytes name = 1;
}
message RentRegistryEventsRequest {
uint64 fid = 1;
}
message OnChainEventRequest {
uint64 fid = 1;
OnChainEventType event_type = 2;
optional uint32 page_size = 3;
optional bytes page_token = 4;
optional bool reverse = 5;
}
message OnChainEventResponse {
repeated OnChainEvent events = 1;
optional bytes next_page_token = 2;
}
message TierDetails {
TierType tier_type = 1;
uint64 expires_at = 2;
}
message StorageLimitsResponse {
repeated StorageLimit limits = 1;
uint32 units = 2;
repeated StorageUnitDetails unit_details = 3;
repeated TierDetails tier_subscriptions = 4;
}
enum StoreType {
STORE_TYPE_NONE = 0;
STORE_TYPE_CASTS = 1;
STORE_TYPE_LINKS = 2;
STORE_TYPE_REACTIONS = 3;
STORE_TYPE_USER_DATA = 4;
STORE_TYPE_VERIFICATIONS = 5;
STORE_TYPE_USERNAME_PROOFS = 6;
}
enum StorageUnitType {
UNIT_TYPE_LEGACY = 0;
UNIT_TYPE_2024 = 1;
}
message StorageUnitDetails {
StorageUnitType unit_type = 1;
uint32 unit_size = 2;
}
message StorageLimit {
StoreType store_type = 1;
string name = 2;
uint64 limit = 3;
uint64 used = 4;
uint64 earliestTimestamp = 5;
bytes earliestHash = 6;
}
message UsernameProofRequest {
bytes name = 1;
}
message UsernameProofsResponse {
repeated UserNameProof proofs = 1;
}
message VerificationRequest {
uint64 fid = 1;
bytes address = 2;
}
message SignerRequest {
uint64 fid = 1;
bytes signer = 2;
}
message LinkRequest {
uint64 fid = 1;
string link_type = 2;
oneof target {
uint64 target_fid = 3;
}
}
message LinksByFidRequest {
uint64 fid = 1;
optional string link_type = 2;
optional uint32 page_size = 3;
optional bytes page_token = 4;
optional bool reverse = 5;
}
message LinksByTargetRequest {
oneof target {
uint64 target_fid = 1;
}
optional string link_type = 2;
optional uint32 page_size = 3;
optional bytes page_token = 4;
optional bool reverse = 5;
}
message IdRegistryEventByAddressRequest {
bytes address = 1;
}
message ContactInfoResponse {
repeated ContactInfoContentBody contacts = 1;
}
message ValidationResponse {
bool valid = 1;
Message message = 2;
}
message SubmitBulkMessagesRequest {
repeated Message messages = 1;
}
message MessageError {
bytes hash = 1;
string errCode = 2;
string message = 3;
}
message BulkMessageResponse {
oneof response {
Message message = 1;
MessageError message_error = 2;
}
}
message SubmitBulkMessagesResponse {
repeated BulkMessageResponse messages = 1;
}
message StreamSyncRequest {
oneof request {
HubInfoRequest get_info = 1;
Empty get_current_peers = 2;
Empty stop_sync = 3;
SyncStatusRequest force_sync = 4;
SyncStatusRequest get_sync_status = 5;
TrieNodePrefix get_all_sync_ids_by_prefix = 6;
SyncIds get_all_messages_by_sync_ids = 7;
TrieNodePrefix get_sync_metadata_by_prefix = 8;
TrieNodePrefix get_sync_snapshot_by_prefix = 9;
OnChainEventRequest get_on_chain_events = 10;
FidRequest get_on_chain_signers_by_fid = 11;
}
}
message StreamError {
string errCode = 1;
string message = 2;
string request = 3;
}
message StreamSyncResponse {
oneof response {
HubInfoResponse get_info = 1;
ContactInfoResponse get_current_peers = 2;
SyncStatusResponse stop_sync = 3;
SyncStatusResponse force_sync = 4;
SyncStatusResponse get_sync_status = 5;
SyncIds get_all_sync_ids_by_prefix = 6;
MessagesResponse get_all_messages_by_sync_ids = 7;
TrieNodeMetadataResponse get_sync_metadata_by_prefix = 8;
TrieNodeSnapshotResponse get_sync_snapshot_by_prefix = 9;
OnChainEventResponse get_on_chain_events = 10;
OnChainEventResponse get_on_chain_signers_by_fid = 11;
StreamError error = 12;
}
}
message StreamFetchRequest {
string idempotency_key = 1;
oneof request {
FidTimestampRequest cast_messages_by_fid = 2;
FidTimestampRequest reaction_messages_by_fid = 3;
FidTimestampRequest verification_messages_by_fid = 4;
FidTimestampRequest user_data_messages_by_fid = 5;
FidTimestampRequest link_messages_by_fid = 6;
}
}
message StreamFetchResponse {
string idempotency_key = 1;
oneof response {
MessagesResponse messages = 2;
StreamError error = 3;
}
}

@@ -1,128 +0,0 @@
syntax = "proto3";
import "message.proto";
import "hub_event.proto";
import "request_response.proto";
import "username_proof.proto";
import "onchain_event.proto";
// Note about http-api annotations:
// The `httpServer.ts` class implements an HTTP API wrapper on top of this gRPC API.
// The annotations below are used to verify that all the HTTP API endpoints are implemented.
// If you are adding a new RPC method and there needs to be a corresponding HTTP API endpoint,
// add the annotation to the method. @http-api: none means that there is no corresponding HTTP API
// If there is no annotation, we assume there is a corresponding HTTP API endpoint with the same name as the RPC method
// Please see `httpServer.ts` for more details
service HubService {
// Submit Methods
rpc SubmitMessage(Message) returns (Message);
// Validation Methods
rpc ValidateMessage(Message) returns (ValidationResponse);
// Event Methods
// @http-api: none
rpc Subscribe(SubscribeRequest) returns (stream HubEvent);
// @http-api: events
rpc GetEvent(EventRequest) returns (HubEvent);
// Casts
// @http-api: castById
rpc GetCast(CastId) returns (Message);
rpc GetCastsByFid(FidRequest) returns (MessagesResponse);
rpc GetCastsByParent(CastsByParentRequest) returns (MessagesResponse);
rpc GetCastsByMention(FidRequest) returns (MessagesResponse);
// Reactions
// @http-api: reactionById
rpc GetReaction(ReactionRequest) returns (Message);
rpc GetReactionsByFid(ReactionsByFidRequest) returns (MessagesResponse);
rpc GetReactionsByCast(ReactionsByTargetRequest) returns (MessagesResponse); // To be deprecated
rpc GetReactionsByTarget(ReactionsByTargetRequest) returns (MessagesResponse);
// User Data
// @http-api: none
rpc GetUserData(UserDataRequest) returns (Message);
rpc GetUserDataByFid(FidRequest) returns (MessagesResponse);
// Username Proof
// @http-api: userNameProofByName
rpc GetUsernameProof(UsernameProofRequest) returns (UserNameProof);
rpc GetUserNameProofsByFid(FidRequest) returns (UsernameProofsResponse);
// Verifications
// @http-api: none
rpc GetVerification(VerificationRequest) returns (Message);
rpc GetVerificationsByFid(FidRequest) returns (MessagesResponse);
// OnChain Events
// @http-api: none
rpc GetOnChainSigner(SignerRequest) returns (OnChainEvent);
rpc GetOnChainSignersByFid(FidRequest) returns (OnChainEventResponse);
// @http-api: none
rpc GetOnChainEvents(OnChainEventRequest) returns (OnChainEventResponse);
// @http-api: none
rpc GetIdRegistryOnChainEvent(FidRequest) returns (OnChainEvent);
// @http-api: onChainIdRegistryEventByAddress
rpc GetIdRegistryOnChainEventByAddress(IdRegistryEventByAddressRequest) returns (OnChainEvent);
// @http-api: storageLimitsByFid
rpc GetCurrentStorageLimitsByFid(FidRequest) returns (StorageLimitsResponse);
rpc GetFids(FidsRequest) returns (FidsResponse);
// Links
// @http-api: linkById
rpc GetLink(LinkRequest) returns (Message);
rpc GetLinksByFid(LinksByFidRequest) returns (MessagesResponse);
// @http-api: linksByTargetFid
rpc GetLinksByTarget(LinksByTargetRequest) returns (MessagesResponse);
// Bulk Methods
// The Bulk methods don't have corresponding HTTP API endpoints because the
// regular endpoints can be used to get all the messages
// @http-api: none
rpc GetAllCastMessagesByFid(FidTimestampRequest) returns (MessagesResponse);
// @http-api: none
rpc GetAllReactionMessagesByFid(FidTimestampRequest) returns (MessagesResponse);
// @http-api: none
rpc GetAllVerificationMessagesByFid(FidTimestampRequest) returns (MessagesResponse);
// @http-api: none
rpc GetAllUserDataMessagesByFid(FidTimestampRequest) returns (MessagesResponse);
// @http-api: none
rpc GetAllLinkMessagesByFid(FidTimestampRequest) returns (MessagesResponse);
// @http-api: none
rpc GetLinkCompactStateMessageByFid(FidRequest) returns (MessagesResponse);
// @http-api: none
rpc SubmitBulkMessages(SubmitBulkMessagesRequest) returns (SubmitBulkMessagesResponse);
// Sync Methods
rpc GetInfo(HubInfoRequest) returns (HubInfoResponse);
rpc GetCurrentPeers(Empty) returns (ContactInfoResponse);
// @http-api: none
rpc StopSync(Empty) returns (SyncStatusResponse);
// This is experimental, do not rely on this endpoint existing in the future
// @http-api: none
rpc ForceSync(SyncStatusRequest) returns (SyncStatusResponse);
// @http-api: none
rpc GetSyncStatus(SyncStatusRequest) returns (SyncStatusResponse);
// @http-api: none
rpc GetAllSyncIdsByPrefix(TrieNodePrefix) returns (SyncIds);
// @http-api: none
rpc GetAllMessagesBySyncIds(SyncIds) returns (MessagesResponse);
// @http-api: none
rpc GetSyncMetadataByPrefix(TrieNodePrefix) returns (TrieNodeMetadataResponse);
// @http-api: none
rpc GetSyncSnapshotByPrefix(TrieNodePrefix) returns (TrieNodeSnapshotResponse);
// @http-api: none
rpc StreamSync(stream StreamSyncRequest) returns (stream StreamSyncResponse);
// @http-api: none
rpc StreamFetch(stream StreamFetchRequest) returns (stream StreamFetchResponse);
}
service AdminService {
rpc RebuildSyncTrie(Empty) returns (Empty);
rpc DeleteAllMessagesFromDb(Empty) returns (Empty);
rpc SubmitOnChainEvent(OnChainEvent) returns (OnChainEvent);
}

@@ -1,9 +0,0 @@
syntax = "proto3";
message DbTrieNode {
bytes key = 1;
repeated uint32 childChars = 2;
uint32 items = 3;
bytes hash = 4;
}

@@ -1,17 +0,0 @@
syntax = "proto3";
enum UserNameType {
USERNAME_TYPE_NONE = 0;
USERNAME_TYPE_FNAME = 1;
USERNAME_TYPE_ENS_L1 = 2;
USERNAME_TYPE_BASENAME = 3;
}
message UserNameProof {
uint64 timestamp = 1;
bytes name = 2;
bytes owner = 3;
bytes signature = 4;
uint64 fid = 5;
UserNameType type = 6;
}