diff --git a/package.json b/package.json index a710617..ab8629a 100644 --- a/package.json +++ b/package.json @@ -47,6 +47,8 @@ }, "dependencies": { "@cosmjs/proto-signing": "0.31.3", + "@cosmjs/stargate": "0.32.4", + "@cosmjs/tendermint-rpc": "0.32.4", "base64js": "1.0.1", "cosmjs-types": "0.9.0", "long": "5.2.0", diff --git a/src/codec.ts b/src/codec.ts index 9db513f..f4214a3 100644 --- a/src/codec.ts +++ b/src/codec.ts @@ -2,7 +2,7 @@ import { GeneratedType } from '@cosmjs/proto-signing' import { Writer, Reader } from 'protobufjs' import { Registry } from './registry' import { convertToProtoFactory } from './util' -import { Any } from 'cosmjs-types/google/protobuf/any' +import { Any } from './proto/google/protobuf/any' export type AnyWithUnpacked = | Any diff --git a/src/proto/binary.ts b/src/proto/binary.ts index 9cb4c41..10a8f40 100644 --- a/src/proto/binary.ts +++ b/src/proto/binary.ts @@ -55,7 +55,7 @@ import { writeByte, zzDecode, zzEncode -} from './varint' +} from './stridejs/codegen/varint' export enum WireType { Varint = 0, diff --git a/src/proto/google/bundle.ts b/src/proto/google/bundle.ts new file mode 100644 index 0000000..8a053b9 --- /dev/null +++ b/src/proto/google/bundle.ts @@ -0,0 +1,13 @@ +/* eslint-disable @typescript-eslint/no-namespace */ +import * as _80 from './protobuf/descriptor' +import * as _81 from './protobuf/any' +import * as _82 from './protobuf/timestamp' +import * as _83 from './protobuf/duration' +export namespace google { + export const protobuf = { + ..._80, + ..._81, + ..._82, + ..._83 + } +} diff --git a/src/proto/google/protobuf/any.ts b/src/proto/google/protobuf/any.ts new file mode 100644 index 0000000..da6ff31 --- /dev/null +++ b/src/proto/google/protobuf/any.ts @@ -0,0 +1,403 @@ +/* eslint-disable prefer-const */ +import { BinaryReader, BinaryWriter } from '../../binary' +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. 
+ * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. 
Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + $typeUrl?: '/google.protobuf.Any' | string + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string + /** Must be a valid serialized protocol buffer of the above specified type. 
*/ + value: Uint8Array +} +export interface AnyProtoMsg { + typeUrl: '/google.protobuf.Any' + value: Uint8Array +} +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. 
Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface AnyAmino { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. 
+ * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + type: string + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: any +} +export interface AnyAminoMsg { + type: string + value: AnyAmino +} +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. 
Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface AnySDKType { + $typeUrl?: '/google.protobuf.Any' | string + type_url: string + value: Uint8Array +} +function createBaseAny(): Any { + return { + $typeUrl: '/google.protobuf.Any', + typeUrl: '', + value: new Uint8Array() + } +} +export const Any = { + typeUrl: '/google.protobuf.Any', + encode( + message: Any, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.typeUrl !== '') { + writer.uint32(10).string(message.typeUrl) + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value) + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): Any { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseAny() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string() + break + case 2: + message.value = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): Any { + const message = createBaseAny() + message.typeUrl = object.typeUrl ?? '' + message.value = object.value ?? 
new Uint8Array() + return message + }, + fromAmino(object: AnyAmino): Any { + return { + typeUrl: object.type, + value: object.value + } + }, + toAmino(message: Any): AnyAmino { + const obj: any = {} + obj.type = message.typeUrl + obj.value = message.value + return obj + }, + fromAminoMsg(object: AnyAminoMsg): Any { + return Any.fromAmino(object.value) + }, + fromProtoMsg(message: AnyProtoMsg): Any { + return Any.decode(message.value) + }, + toProto(message: Any): Uint8Array { + return Any.encode(message).finish() + }, + toProtoMsg(message: Any): AnyProtoMsg { + return { + typeUrl: '/google.protobuf.Any', + value: Any.encode(message).finish() + } + } +} diff --git a/src/proto/google/protobuf/descriptor.ts b/src/proto/google/protobuf/descriptor.ts new file mode 100644 index 0000000..e913326 --- /dev/null +++ b/src/proto/google/protobuf/descriptor.ts @@ -0,0 +1,5873 @@ +/* eslint-disable prefer-const */ +import { BinaryReader, BinaryWriter } from '../../binary' +import { bytesFromBase64, base64FromBytes } from '../../helpers' +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. 
*/ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. */ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1 +} +export const FieldDescriptorProto_TypeSDKType = FieldDescriptorProto_Type +export const FieldDescriptorProto_TypeAmino = FieldDescriptorProto_Type +export function fieldDescriptorProto_TypeFromJSON( + object: any +): FieldDescriptorProto_Type { + switch (object) { + case 1: + case 'TYPE_DOUBLE': + return FieldDescriptorProto_Type.TYPE_DOUBLE + case 2: + case 'TYPE_FLOAT': + return FieldDescriptorProto_Type.TYPE_FLOAT + case 3: + case 'TYPE_INT64': + return FieldDescriptorProto_Type.TYPE_INT64 + case 4: + case 'TYPE_UINT64': + return FieldDescriptorProto_Type.TYPE_UINT64 + case 5: + case 'TYPE_INT32': + return FieldDescriptorProto_Type.TYPE_INT32 + case 6: + case 'TYPE_FIXED64': + return FieldDescriptorProto_Type.TYPE_FIXED64 + case 7: + case 'TYPE_FIXED32': + return FieldDescriptorProto_Type.TYPE_FIXED32 + case 8: + case 'TYPE_BOOL': + return FieldDescriptorProto_Type.TYPE_BOOL + case 9: + case 'TYPE_STRING': + return FieldDescriptorProto_Type.TYPE_STRING + case 10: + case 'TYPE_GROUP': + return FieldDescriptorProto_Type.TYPE_GROUP + case 11: + case 'TYPE_MESSAGE': + return FieldDescriptorProto_Type.TYPE_MESSAGE + case 12: + case 'TYPE_BYTES': + return FieldDescriptorProto_Type.TYPE_BYTES + case 13: + case 'TYPE_UINT32': + return FieldDescriptorProto_Type.TYPE_UINT32 + case 14: + case 'TYPE_ENUM': + return FieldDescriptorProto_Type.TYPE_ENUM + case 15: + case 'TYPE_SFIXED32': + return FieldDescriptorProto_Type.TYPE_SFIXED32 + case 16: + case 'TYPE_SFIXED64': + return FieldDescriptorProto_Type.TYPE_SFIXED64 + case 17: + case 'TYPE_SINT32': + return FieldDescriptorProto_Type.TYPE_SINT32 + case 18: + case 'TYPE_SINT64': + return FieldDescriptorProto_Type.TYPE_SINT64 + case -1: + case 'UNRECOGNIZED': + default: + 
return FieldDescriptorProto_Type.UNRECOGNIZED + } +} +export function fieldDescriptorProto_TypeToJSON( + object: FieldDescriptorProto_Type +): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return 'TYPE_DOUBLE' + case FieldDescriptorProto_Type.TYPE_FLOAT: + return 'TYPE_FLOAT' + case FieldDescriptorProto_Type.TYPE_INT64: + return 'TYPE_INT64' + case FieldDescriptorProto_Type.TYPE_UINT64: + return 'TYPE_UINT64' + case FieldDescriptorProto_Type.TYPE_INT32: + return 'TYPE_INT32' + case FieldDescriptorProto_Type.TYPE_FIXED64: + return 'TYPE_FIXED64' + case FieldDescriptorProto_Type.TYPE_FIXED32: + return 'TYPE_FIXED32' + case FieldDescriptorProto_Type.TYPE_BOOL: + return 'TYPE_BOOL' + case FieldDescriptorProto_Type.TYPE_STRING: + return 'TYPE_STRING' + case FieldDescriptorProto_Type.TYPE_GROUP: + return 'TYPE_GROUP' + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return 'TYPE_MESSAGE' + case FieldDescriptorProto_Type.TYPE_BYTES: + return 'TYPE_BYTES' + case FieldDescriptorProto_Type.TYPE_UINT32: + return 'TYPE_UINT32' + case FieldDescriptorProto_Type.TYPE_ENUM: + return 'TYPE_ENUM' + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return 'TYPE_SFIXED32' + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return 'TYPE_SFIXED64' + case FieldDescriptorProto_Type.TYPE_SINT32: + return 'TYPE_SINT32' + case FieldDescriptorProto_Type.TYPE_SINT64: + return 'TYPE_SINT64' + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return 'UNRECOGNIZED' + } +} +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1 +} +export const FieldDescriptorProto_LabelSDKType = FieldDescriptorProto_Label +export const FieldDescriptorProto_LabelAmino = FieldDescriptorProto_Label +export function fieldDescriptorProto_LabelFromJSON( + object: any +): FieldDescriptorProto_Label { + switch (object) { + case 1: + case 'LABEL_OPTIONAL': + return 
FieldDescriptorProto_Label.LABEL_OPTIONAL + case 2: + case 'LABEL_REQUIRED': + return FieldDescriptorProto_Label.LABEL_REQUIRED + case 3: + case 'LABEL_REPEATED': + return FieldDescriptorProto_Label.LABEL_REPEATED + case -1: + case 'UNRECOGNIZED': + default: + return FieldDescriptorProto_Label.UNRECOGNIZED + } +} +export function fieldDescriptorProto_LabelToJSON( + object: FieldDescriptorProto_Label +): string { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return 'LABEL_OPTIONAL' + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return 'LABEL_REQUIRED' + case FieldDescriptorProto_Label.LABEL_REPEATED: + return 'LABEL_REPEATED' + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return 'UNRECOGNIZED' + } +} +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1 +} +export const FileOptions_OptimizeModeSDKType = FileOptions_OptimizeMode +export const FileOptions_OptimizeModeAmino = FileOptions_OptimizeMode +export function fileOptions_OptimizeModeFromJSON( + object: any +): FileOptions_OptimizeMode { + switch (object) { + case 1: + case 'SPEED': + return FileOptions_OptimizeMode.SPEED + case 2: + case 'CODE_SIZE': + return FileOptions_OptimizeMode.CODE_SIZE + case 3: + case 'LITE_RUNTIME': + return FileOptions_OptimizeMode.LITE_RUNTIME + case -1: + case 'UNRECOGNIZED': + default: + return FileOptions_OptimizeMode.UNRECOGNIZED + } +} +export function fileOptions_OptimizeModeToJSON( + object: FileOptions_OptimizeMode +): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return 'SPEED' + case FileOptions_OptimizeMode.CODE_SIZE: + return 'CODE_SIZE' + case FileOptions_OptimizeMode.LITE_RUNTIME: + return 'LITE_RUNTIME' + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return 'UNRECOGNIZED' + } +} +export enum FieldOptions_CType { + /** STRING - Default mode. 
*/ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1 +} +export const FieldOptions_CTypeSDKType = FieldOptions_CType +export const FieldOptions_CTypeAmino = FieldOptions_CType +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case 'STRING': + return FieldOptions_CType.STRING + case 1: + case 'CORD': + return FieldOptions_CType.CORD + case 2: + case 'STRING_PIECE': + return FieldOptions_CType.STRING_PIECE + case -1: + case 'UNRECOGNIZED': + default: + return FieldOptions_CType.UNRECOGNIZED + } +} +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return 'STRING' + case FieldOptions_CType.CORD: + return 'CORD' + case FieldOptions_CType.STRING_PIECE: + return 'STRING_PIECE' + case FieldOptions_CType.UNRECOGNIZED: + default: + return 'UNRECOGNIZED' + } +} +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. 
*/ + JS_NUMBER = 2, + UNRECOGNIZED = -1 +} +export const FieldOptions_JSTypeSDKType = FieldOptions_JSType +export const FieldOptions_JSTypeAmino = FieldOptions_JSType +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case 'JS_NORMAL': + return FieldOptions_JSType.JS_NORMAL + case 1: + case 'JS_STRING': + return FieldOptions_JSType.JS_STRING + case 2: + case 'JS_NUMBER': + return FieldOptions_JSType.JS_NUMBER + case -1: + case 'UNRECOGNIZED': + default: + return FieldOptions_JSType.UNRECOGNIZED + } +} +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return 'JS_NORMAL' + case FieldOptions_JSType.JS_STRING: + return 'JS_STRING' + case FieldOptions_JSType.JS_NUMBER: + return 'JS_NUMBER' + case FieldOptions_JSType.UNRECOGNIZED: + default: + return 'UNRECOGNIZED' + } +} +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1 +} +export const MethodOptions_IdempotencyLevelSDKType = + MethodOptions_IdempotencyLevel +export const MethodOptions_IdempotencyLevelAmino = + MethodOptions_IdempotencyLevel +export function methodOptions_IdempotencyLevelFromJSON( + object: any +): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case 'IDEMPOTENCY_UNKNOWN': + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN + case 1: + case 'NO_SIDE_EFFECTS': + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS + case 2: + case 'IDEMPOTENT': + return MethodOptions_IdempotencyLevel.IDEMPOTENT + case -1: + case 'UNRECOGNIZED': + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED + } +} +export function methodOptions_IdempotencyLevelToJSON( + object: MethodOptions_IdempotencyLevel +): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return 'IDEMPOTENCY_UNKNOWN' + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return 'NO_SIDE_EFFECTS' + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return 'IDEMPOTENT' + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return 'UNRECOGNIZED' + } +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[] +} +export interface FileDescriptorSetProtoMsg { + typeUrl: '/google.protobuf.FileDescriptorSet' + value: Uint8Array +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. 
+ */ +export interface FileDescriptorSetAmino { + file?: FileDescriptorProtoAmino[] +} +export interface FileDescriptorSetAminoMsg { + type: '/google.protobuf.FileDescriptorSet' + value: FileDescriptorSetAmino +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSetSDKType { + file: FileDescriptorProtoSDKType[] +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string + /** e.g. "foo", "foo.bar", etc. */ + package: string + /** Names of files imported by this file. */ + dependency: string[] + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[] + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[] + /** All top-level definitions in this file. */ + messageType: DescriptorProto[] + enumType: EnumDescriptorProto[] + service: ServiceDescriptorProto[] + extension: FieldDescriptorProto[] + options?: FileOptions + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo?: SourceCodeInfo + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string +} +export interface FileDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.FileDescriptorProto' + value: Uint8Array +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProtoAmino { + /** file name, relative to root of source tree */ + name?: string + /** e.g. "foo", "foo.bar", etc. */ + package?: string + /** Names of files imported by this file. 
*/ + dependency?: string[] + /** Indexes of the public imported files in the dependency list above. */ + public_dependency?: number[] + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weak_dependency?: number[] + /** All top-level definitions in this file. */ + message_type?: DescriptorProtoAmino[] + enum_type?: EnumDescriptorProtoAmino[] + service?: ServiceDescriptorProtoAmino[] + extension?: FieldDescriptorProtoAmino[] + options?: FileOptionsAmino + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + source_code_info?: SourceCodeInfoAmino + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax?: string +} +export interface FileDescriptorProtoAminoMsg { + type: '/google.protobuf.FileDescriptorProto' + value: FileDescriptorProtoAmino +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProtoSDKType { + name: string + package: string + dependency: string[] + public_dependency: number[] + weak_dependency: number[] + message_type: DescriptorProtoSDKType[] + enum_type: EnumDescriptorProtoSDKType[] + service: ServiceDescriptorProtoSDKType[] + extension: FieldDescriptorProtoSDKType[] + options?: FileOptionsSDKType + source_code_info?: SourceCodeInfoSDKType + syntax: string +} +/** Describes a message type. 
*/ +export interface DescriptorProto { + name: string + field: FieldDescriptorProto[] + extension: FieldDescriptorProto[] + nestedType: DescriptorProto[] + enumType: EnumDescriptorProto[] + extensionRange: DescriptorProto_ExtensionRange[] + oneofDecl: OneofDescriptorProto[] + options?: MessageOptions + reservedRange: DescriptorProto_ReservedRange[] + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[] +} +export interface DescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.DescriptorProto' + value: Uint8Array +} +/** Describes a message type. */ +export interface DescriptorProtoAmino { + name?: string + field?: FieldDescriptorProtoAmino[] + extension?: FieldDescriptorProtoAmino[] + nested_type?: DescriptorProtoAmino[] + enum_type?: EnumDescriptorProtoAmino[] + extension_range?: DescriptorProto_ExtensionRangeAmino[] + oneof_decl?: OneofDescriptorProtoAmino[] + options?: MessageOptionsAmino + reserved_range?: DescriptorProto_ReservedRangeAmino[] + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reserved_name?: string[] +} +export interface DescriptorProtoAminoMsg { + type: '/google.protobuf.DescriptorProto' + value: DescriptorProtoAmino +} +/** Describes a message type. */ +export interface DescriptorProtoSDKType { + name: string + field: FieldDescriptorProtoSDKType[] + extension: FieldDescriptorProtoSDKType[] + nested_type: DescriptorProtoSDKType[] + enum_type: EnumDescriptorProtoSDKType[] + extension_range: DescriptorProto_ExtensionRangeSDKType[] + oneof_decl: OneofDescriptorProtoSDKType[] + options?: MessageOptionsSDKType + reserved_range: DescriptorProto_ReservedRangeSDKType[] + reserved_name: string[] +} +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number + /** Exclusive. 
*/ + end: number + options?: ExtensionRangeOptions +} +export interface DescriptorProto_ExtensionRangeProtoMsg { + typeUrl: '/google.protobuf.ExtensionRange' + value: Uint8Array +} +export interface DescriptorProto_ExtensionRangeAmino { + /** Inclusive. */ + start?: number + /** Exclusive. */ + end?: number + options?: ExtensionRangeOptionsAmino +} +export interface DescriptorProto_ExtensionRangeAminoMsg { + type: '/google.protobuf.ExtensionRange' + value: DescriptorProto_ExtensionRangeAmino +} +export interface DescriptorProto_ExtensionRangeSDKType { + start: number + end: number + options?: ExtensionRangeOptionsSDKType +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number + /** Exclusive. */ + end: number +} +export interface DescriptorProto_ReservedRangeProtoMsg { + typeUrl: '/google.protobuf.ReservedRange' + value: Uint8Array +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRangeAmino { + /** Inclusive. */ + start?: number + /** Exclusive. */ + end?: number +} +export interface DescriptorProto_ReservedRangeAminoMsg { + type: '/google.protobuf.ReservedRange' + value: DescriptorProto_ReservedRangeAmino +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRangeSDKType { + start: number + end: number +} +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[] +} +export interface ExtensionRangeOptionsProtoMsg { + typeUrl: '/google.protobuf.ExtensionRangeOptions' + value: Uint8Array +} +export interface ExtensionRangeOptionsAmino { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface ExtensionRangeOptionsAminoMsg { + type: '/google.protobuf.ExtensionRangeOptions' + value: ExtensionRangeOptionsAmino +} +export interface ExtensionRangeOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[] +} +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string + number: number + label: FieldDescriptorProto_Label + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number + /** + * JSON name of this field. The value is set by protocol compiler. 
If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string + options?: FieldOptions +} +export interface FieldDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.FieldDescriptorProto' + value: Uint8Array +} +/** Describes a field within a message. */ +export interface FieldDescriptorProtoAmino { + name?: string + number?: number + label?: FieldDescriptorProto_Label + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type?: FieldDescriptorProto_Type + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + type_name?: string + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee?: string + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + default_value?: string + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneof_index?: number + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. 
+ */ + json_name?: string + options?: FieldOptionsAmino +} +export interface FieldDescriptorProtoAminoMsg { + type: '/google.protobuf.FieldDescriptorProto' + value: FieldDescriptorProtoAmino +} +/** Describes a field within a message. */ +export interface FieldDescriptorProtoSDKType { + name: string + number: number + label: FieldDescriptorProto_Label + type: FieldDescriptorProto_Type + type_name: string + extendee: string + default_value: string + oneof_index: number + json_name: string + options?: FieldOptionsSDKType +} +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string + options?: OneofOptions +} +export interface OneofDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.OneofDescriptorProto' + value: Uint8Array +} +/** Describes a oneof. */ +export interface OneofDescriptorProtoAmino { + name?: string + options?: OneofOptionsAmino +} +export interface OneofDescriptorProtoAminoMsg { + type: '/google.protobuf.OneofDescriptorProto' + value: OneofDescriptorProtoAmino +} +/** Describes a oneof. */ +export interface OneofDescriptorProtoSDKType { + name: string + options?: OneofOptionsSDKType +} +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string + value: EnumValueDescriptorProto[] + options?: EnumOptions + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[] + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[] +} +export interface EnumDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.EnumDescriptorProto' + value: Uint8Array +} +/** Describes an enum type. */ +export interface EnumDescriptorProtoAmino { + name?: string + value?: EnumValueDescriptorProtoAmino[] + options?: EnumOptionsAmino + /** + * Range of reserved numeric values. 
Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reserved_range?: EnumDescriptorProto_EnumReservedRangeAmino[] + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reserved_name?: string[] +} +export interface EnumDescriptorProtoAminoMsg { + type: '/google.protobuf.EnumDescriptorProto' + value: EnumDescriptorProtoAmino +} +/** Describes an enum type. */ +export interface EnumDescriptorProtoSDKType { + name: string + value: EnumValueDescriptorProtoSDKType[] + options?: EnumOptionsSDKType + reserved_range: EnumDescriptorProto_EnumReservedRangeSDKType[] + reserved_name: string[] +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number + /** Inclusive. */ + end: number +} +export interface EnumDescriptorProto_EnumReservedRangeProtoMsg { + typeUrl: '/google.protobuf.EnumReservedRange' + value: Uint8Array +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRangeAmino { + /** Inclusive. */ + start?: number + /** Inclusive. */ + end?: number +} +export interface EnumDescriptorProto_EnumReservedRangeAminoMsg { + type: '/google.protobuf.EnumReservedRange' + value: EnumDescriptorProto_EnumReservedRangeAmino +} +/** + * Range of reserved numeric values. 
Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRangeSDKType { + start: number + end: number +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string + number: number + options?: EnumValueOptions +} +export interface EnumValueDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.EnumValueDescriptorProto' + value: Uint8Array +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProtoAmino { + name?: string + number?: number + options?: EnumValueOptionsAmino +} +export interface EnumValueDescriptorProtoAminoMsg { + type: '/google.protobuf.EnumValueDescriptorProto' + value: EnumValueDescriptorProtoAmino +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProtoSDKType { + name: string + number: number + options?: EnumValueOptionsSDKType +} +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string + method: MethodDescriptorProto[] + options?: ServiceOptions +} +export interface ServiceDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.ServiceDescriptorProto' + value: Uint8Array +} +/** Describes a service. */ +export interface ServiceDescriptorProtoAmino { + name?: string + method?: MethodDescriptorProtoAmino[] + options?: ServiceOptionsAmino +} +export interface ServiceDescriptorProtoAminoMsg { + type: '/google.protobuf.ServiceDescriptorProto' + value: ServiceDescriptorProtoAmino +} +/** Describes a service. */ +export interface ServiceDescriptorProtoSDKType { + name: string + method: MethodDescriptorProtoSDKType[] + options?: ServiceOptionsSDKType +} +/** Describes a method of a service. 
*/ +export interface MethodDescriptorProto { + name: string + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string + outputType: string + options?: MethodOptions + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean +} +export interface MethodDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.MethodDescriptorProto' + value: Uint8Array +} +/** Describes a method of a service. */ +export interface MethodDescriptorProtoAmino { + name?: string + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + input_type?: string + output_type?: string + options?: MethodOptionsAmino + /** Identifies if client streams multiple client messages */ + client_streaming?: boolean + /** Identifies if server streams multiple server messages */ + server_streaming?: boolean +} +export interface MethodDescriptorProtoAminoMsg { + type: '/google.protobuf.MethodDescriptorProto' + value: MethodDescriptorProtoAmino +} +/** Describes a method of a service. */ +export interface MethodDescriptorProtoSDKType { + name: string + input_type: string + output_type: string + options?: MethodOptionsSDKType + client_streaming: boolean + server_streaming: boolean +} +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string + /** + * If set, all the classes from the .proto file are wrapped in a single + * outer class with the given name. 
This applies to both Proto1 + * (equivalent to the old "--one_java_file" option) and Proto2 (where + * a .proto always translates to a single class, but you may want to + * explicitly choose the class name). + */ + javaOuterClassname: string + /** + * If set true, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the outer class + * named by java_outer_classname. However, the outer class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean + /** This option does nothing. */ + /** @deprecated */ + javaGenerateEqualsAndHash: boolean + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. + * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean + optimizeFor: FileOptions_OptimizeMode + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. 
+ * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean + javaGenericServices: boolean + pyGenericServices: boolean + phpGenericServices: boolean + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string + /** Namespace for generated classes; defaults to the package. */ + csharpNamespace: string + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. 
+ */ + phpMetadataNamespace: string + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[] +} +export interface FileOptionsProtoMsg { + typeUrl: '/google.protobuf.FileOptions' + value: Uint8Array +} +export interface FileOptionsAmino { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + java_package?: string + /** + * If set, all the classes from the .proto file are wrapped in a single + * outer class with the given name. This applies to both Proto1 + * (equivalent to the old "--one_java_file" option) and Proto2 (where + * a .proto always translates to a single class, but you may want to + * explicitly choose the class name). + */ + java_outer_classname?: string + /** + * If set true, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the outer class + * named by java_outer_classname. However, the outer class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + java_multiple_files?: boolean + /** This option does nothing. */ + /** @deprecated */ + java_generate_equals_and_hash?: boolean + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. 
+ * However, an extension field still accepts non-UTF-8 byte sequences. + * This option has no effect on when used with the lite runtime. + */ + java_string_check_utf8?: boolean + optimize_for?: FileOptions_OptimizeMode + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + go_package?: string + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + cc_generic_services?: boolean + java_generic_services?: boolean + py_generic_services?: boolean + php_generic_services?: boolean + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated?: boolean + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + cc_enable_arenas?: boolean + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. 
+ */ + objc_class_prefix?: string + /** Namespace for generated classes; defaults to the package. */ + csharp_namespace?: string + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swift_prefix?: string + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + php_class_prefix?: string + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + php_namespace?: string + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + php_metadata_namespace?: string + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + ruby_package?: string + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. 
+ */ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface FileOptionsAminoMsg { + type: '/google.protobuf.FileOptions' + value: FileOptionsAmino +} +export interface FileOptionsSDKType { + java_package: string + java_outer_classname: string + java_multiple_files: boolean + /** @deprecated */ + java_generate_equals_and_hash: boolean + java_string_check_utf8: boolean + optimize_for: FileOptions_OptimizeMode + go_package: string + cc_generic_services: boolean + java_generic_services: boolean + py_generic_services: boolean + php_generic_services: boolean + deprecated: boolean + cc_enable_arenas: boolean + objc_class_prefix: string + csharp_namespace: string + swift_prefix: string + php_class_prefix: string + php_namespace: string + php_metadata_namespace: string + ruby_package: string + uninterpreted_option: UninterpretedOptionSDKType[] +} +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[] +} +export interface MessageOptionsProtoMsg { + typeUrl: '/google.protobuf.MessageOptions' + value: Uint8Array +} +export interface MessageOptionsAmino { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. 
they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + message_set_wire_format?: boolean + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". + */ + no_standard_descriptor_accessor?: boolean + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated?: boolean + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + map_entry?: boolean + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface MessageOptionsAminoMsg { + type: '/google.protobuf.MessageOptions' + value: MessageOptionsAmino +} +export interface MessageOptionsSDKType { + message_set_wire_format: boolean + no_standard_descriptor_accessor: boolean + deprecated: boolean + map_entry: boolean + uninterpreted_option: UninterpretedOptionSDKType[] +} +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. + */ + packed: boolean + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. 
It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. + * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean + /** For Google-internal migration only. Do not use. 
*/ + weak: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[] +} +export interface FieldOptionsProtoMsg { + typeUrl: '/google.protobuf.FieldOptions' + value: Uint8Array +} +export interface FieldOptionsAmino { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype?: FieldOptions_CType + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. + */ + packed?: boolean + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype?: FieldOptions_JSType + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. 
The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. + * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy?: boolean + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated?: boolean + /** For Google-internal migration only. Do not use. */ + weak?: boolean + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface FieldOptionsAminoMsg { + type: '/google.protobuf.FieldOptions' + value: FieldOptionsAmino +} +export interface FieldOptionsSDKType { + ctype: FieldOptions_CType + packed: boolean + jstype: FieldOptions_JSType + lazy: boolean + deprecated: boolean + weak: boolean + uninterpreted_option: UninterpretedOptionSDKType[] +} +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[] +} +export interface OneofOptionsProtoMsg { + typeUrl: '/google.protobuf.OneofOptions' + value: Uint8Array +} +export interface OneofOptionsAmino { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface OneofOptionsAminoMsg { + type: '/google.protobuf.OneofOptions' + value: OneofOptionsAmino +} +export interface OneofOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[] +} +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[] +} +export interface EnumOptionsProtoMsg { + typeUrl: '/google.protobuf.EnumOptions' + value: Uint8Array +} +export interface EnumOptionsAmino { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allow_alias?: boolean + /** + * Is this enum deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated?: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface EnumOptionsAminoMsg { + type: '/google.protobuf.EnumOptions' + value: EnumOptionsAmino +} +export interface EnumOptionsSDKType { + allow_alias: boolean + deprecated: boolean + uninterpreted_option: UninterpretedOptionSDKType[] +} +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[] +} +export interface EnumValueOptionsProtoMsg { + typeUrl: '/google.protobuf.EnumValueOptions' + value: Uint8Array +} +export interface EnumValueOptionsAmino { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated?: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface EnumValueOptionsAminoMsg { + type: '/google.protobuf.EnumValueOptions' + value: EnumValueOptionsAmino +} +export interface EnumValueOptionsSDKType { + deprecated: boolean + uninterpreted_option: UninterpretedOptionSDKType[] +} +export interface ServiceOptions { + /** + * Is this service deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[] +} +export interface ServiceOptionsProtoMsg { + typeUrl: '/google.protobuf.ServiceOptions' + value: Uint8Array +} +export interface ServiceOptionsAmino { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated?: boolean + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface ServiceOptionsAminoMsg { + type: '/google.protobuf.ServiceOptions' + value: ServiceOptionsAmino +} +export interface ServiceOptionsSDKType { + deprecated: boolean + uninterpreted_option: UninterpretedOptionSDKType[] +} +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean + idempotencyLevel: MethodOptions_IdempotencyLevel + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[] +} +export interface MethodOptionsProtoMsg { + typeUrl: '/google.protobuf.MethodOptions' + value: Uint8Array +} +export interface MethodOptionsAmino { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. 
+ */ + deprecated?: boolean + idempotency_level?: MethodOptions_IdempotencyLevel + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[] +} +export interface MethodOptionsAminoMsg { + type: '/google.protobuf.MethodOptions' + value: MethodOptionsAmino +} +export interface MethodOptionsSDKType { + deprecated: boolean + idempotency_level: MethodOptions_IdempotencyLevel + uninterpreted_option: UninterpretedOptionSDKType[] +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[] + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifierValue: string + positiveIntValue: bigint + negativeIntValue: bigint + doubleValue: number + stringValue: Uint8Array + aggregateValue: string +} +export interface UninterpretedOptionProtoMsg { + typeUrl: '/google.protobuf.UninterpretedOption' + value: Uint8Array +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. 
+ */ +export interface UninterpretedOptionAmino { + name?: UninterpretedOption_NamePartAmino[] + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifier_value?: string + positive_int_value?: string + negative_int_value?: string + double_value?: number + string_value?: string + aggregate_value?: string +} +export interface UninterpretedOptionAminoMsg { + type: '/google.protobuf.UninterpretedOption' + value: UninterpretedOptionAmino +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOptionSDKType { + name: UninterpretedOption_NamePartSDKType[] + identifier_value: string + positive_int_value: bigint + negative_int_value: bigint + double_value: number + string_value: Uint8Array + aggregate_value: string +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string + isExtension: boolean +} +export interface UninterpretedOption_NamePartProtoMsg { + typeUrl: '/google.protobuf.NamePart' + value: Uint8Array +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). 
+ * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePartAmino { + name_part?: string + is_extension?: boolean +} +export interface UninterpretedOption_NamePartAminoMsg { + type: '/google.protobuf.NamePart' + value: UninterpretedOption_NamePartAmino +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePartSDKType { + name_part: string + is_extension: boolean +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. 
For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[] +} +export interface SourceCodeInfoProtoMsg { + typeUrl: '/google.protobuf.SourceCodeInfo' + value: Uint8Array +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfoAmino { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. 
+ * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. 
+ */ + location?: SourceCodeInfo_LocationAmino[] +} +export interface SourceCodeInfoAminoMsg { + type: '/google.protobuf.SourceCodeInfo' + value: SourceCodeInfoAmino +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfoSDKType { + location: SourceCodeInfo_LocationSDKType[] +} +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path: number[] + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[] + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. 
+ * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. *\/ + * /* Block comment attached to + * * grault. *\/ + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string + trailingComments: string + leadingDetachedComments: string[] +} +export interface SourceCodeInfo_LocationProtoMsg { + typeUrl: '/google.protobuf.Location' + value: Uint8Array +} +export interface SourceCodeInfo_LocationAmino { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. 
For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path?: number[] + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span?: number[] + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. 
+ * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. *\/ + * /* Block comment attached to + * * grault. *\/ + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leading_comments?: string + trailing_comments?: string + leading_detached_comments?: string[] +} +export interface SourceCodeInfo_LocationAminoMsg { + type: '/google.protobuf.Location' + value: SourceCodeInfo_LocationAmino +} +export interface SourceCodeInfo_LocationSDKType { + path: number[] + span: number[] + leading_comments: string + trailing_comments: string + leading_detached_comments: string[] +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[] +} +export interface GeneratedCodeInfoProtoMsg { + typeUrl: '/google.protobuf.GeneratedCodeInfo' + value: Uint8Array +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. 
+ */ +export interface GeneratedCodeInfoAmino { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation?: GeneratedCodeInfo_AnnotationAmino[] +} +export interface GeneratedCodeInfoAminoMsg { + type: '/google.protobuf.GeneratedCodeInfo' + value: GeneratedCodeInfoAmino +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfoSDKType { + annotation: GeneratedCodeInfo_AnnotationSDKType[] +} +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[] + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number +} +export interface GeneratedCodeInfo_AnnotationProtoMsg { + typeUrl: '/google.protobuf.Annotation' + value: Uint8Array +} +export interface GeneratedCodeInfo_AnnotationAmino { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path?: number[] + /** Identifies the filesystem path to the original source .proto. */ + source_file?: string + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. 
+ */ + begin?: number + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end?: number +} +export interface GeneratedCodeInfo_AnnotationAminoMsg { + type: '/google.protobuf.Annotation' + value: GeneratedCodeInfo_AnnotationAmino +} +export interface GeneratedCodeInfo_AnnotationSDKType { + path: number[] + source_file: string + begin: number + end: number +} +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { + file: [] + } +} +export const FileDescriptorSet = { + typeUrl: '/google.protobuf.FileDescriptorSet', + encode( + message: FileDescriptorSet, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorSet { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseFileDescriptorSet() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): FileDescriptorSet { + const message = createBaseFileDescriptorSet() + message.file = + object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || [] + return message + }, + fromAmino(object: FileDescriptorSetAmino): FileDescriptorSet { + const message = createBaseFileDescriptorSet() + message.file = + object.file?.map((e) => FileDescriptorProto.fromAmino(e)) || [] + return message + }, + toAmino(message: FileDescriptorSet): FileDescriptorSetAmino { + const obj: any = {} + if (message.file) { + obj.file = message.file.map((e) => + e ? FileDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.file = message.file + } + return obj + }, + fromAminoMsg(object: FileDescriptorSetAminoMsg): FileDescriptorSet { + return FileDescriptorSet.fromAmino(object.value) + }, + fromProtoMsg(message: FileDescriptorSetProtoMsg): FileDescriptorSet { + return FileDescriptorSet.decode(message.value) + }, + toProto(message: FileDescriptorSet): Uint8Array { + return FileDescriptorSet.encode(message).finish() + }, + toProtoMsg(message: FileDescriptorSet): FileDescriptorSetProtoMsg { + return { + typeUrl: '/google.protobuf.FileDescriptorSet', + value: FileDescriptorSet.encode(message).finish() + } + } +} +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: '', + package: '', + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: '' + } +} +export const FileDescriptorProto = { + typeUrl: '/google.protobuf.FileDescriptorProto', + encode( + message: 
FileDescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + if (message.package !== '') { + writer.uint32(18).string(message.package) + } + for (const v of message.dependency) { + writer.uint32(26).string(v!) + } + writer.uint32(82).fork() + for (const v of message.publicDependency) { + writer.int32(v) + } + writer.ldelim() + writer.uint32(90).fork() + for (const v of message.weakDependency) { + writer.int32(v) + } + writer.ldelim() + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim() + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim() + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim() + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim() + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim() + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode( + message.sourceCodeInfo, + writer.uint32(74).fork() + ).ldelim() + } + if (message.syntax !== '') { + writer.uint32(98).string(message.syntax) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): FileDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseFileDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 2: + message.package = reader.string() + break + case 3: + message.dependency.push(reader.string()) + break + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()) + } + } else { + message.publicDependency.push(reader.int32()) + } + break + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()) + } + } else { + message.weakDependency.push(reader.int32()) + } + break + case 4: + message.messageType.push( + DescriptorProto.decode(reader, reader.uint32()) + ) + break + case 5: + message.enumType.push( + EnumDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 6: + message.service.push( + ServiceDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 7: + message.extension.push( + FieldDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 8: + message.options = FileOptions.decode(reader, reader.uint32()) + break + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode( + reader, + reader.uint32() + ) + break + case 12: + message.syntax = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): FileDescriptorProto { + const message = createBaseFileDescriptorProto() + message.name = object.name ?? '' + message.package = object.package ?? 
'' + message.dependency = object.dependency?.map((e) => e) || [] + message.publicDependency = object.publicDependency?.map((e) => e) || [] + message.weakDependency = object.weakDependency?.map((e) => e) || [] + message.messageType = + object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || [] + message.enumType = + object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || [] + message.service = + object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || [] + message.extension = + object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || [] + message.options = + object.options !== undefined && object.options !== null + ? FileOptions.fromPartial(object.options) + : undefined + message.sourceCodeInfo = + object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined + message.syntax = object.syntax ?? '' + return message + }, + fromAmino(object: FileDescriptorProtoAmino): FileDescriptorProto { + const message = createBaseFileDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + if (object.package !== undefined && object.package !== null) { + message.package = object.package + } + message.dependency = object.dependency?.map((e) => e) || [] + message.publicDependency = object.public_dependency?.map((e) => e) || [] + message.weakDependency = object.weak_dependency?.map((e) => e) || [] + message.messageType = + object.message_type?.map((e) => DescriptorProto.fromAmino(e)) || [] + message.enumType = + object.enum_type?.map((e) => EnumDescriptorProto.fromAmino(e)) || [] + message.service = + object.service?.map((e) => ServiceDescriptorProto.fromAmino(e)) || [] + message.extension = + object.extension?.map((e) => FieldDescriptorProto.fromAmino(e)) || [] + if (object.options !== undefined && object.options !== null) { + message.options = FileOptions.fromAmino(object.options) + } + if ( + 
object.source_code_info !== undefined && + object.source_code_info !== null + ) { + message.sourceCodeInfo = SourceCodeInfo.fromAmino(object.source_code_info) + } + if (object.syntax !== undefined && object.syntax !== null) { + message.syntax = object.syntax + } + return message + }, + toAmino(message: FileDescriptorProto): FileDescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + obj.package = message.package === '' ? undefined : message.package + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e) + } else { + obj.dependency = message.dependency + } + if (message.publicDependency) { + obj.public_dependency = message.publicDependency.map((e) => e) + } else { + obj.public_dependency = message.publicDependency + } + if (message.weakDependency) { + obj.weak_dependency = message.weakDependency.map((e) => e) + } else { + obj.weak_dependency = message.weakDependency + } + if (message.messageType) { + obj.message_type = message.messageType.map((e) => + e ? DescriptorProto.toAmino(e) : undefined + ) + } else { + obj.message_type = message.messageType + } + if (message.enumType) { + obj.enum_type = message.enumType.map((e) => + e ? EnumDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.enum_type = message.enumType + } + if (message.service) { + obj.service = message.service.map((e) => + e ? ServiceDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.service = message.service + } + if (message.extension) { + obj.extension = message.extension.map((e) => + e ? FieldDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.extension = message.extension + } + obj.options = message.options + ? FileOptions.toAmino(message.options) + : undefined + obj.source_code_info = message.sourceCodeInfo + ? SourceCodeInfo.toAmino(message.sourceCodeInfo) + : undefined + obj.syntax = message.syntax === '' ? 
undefined : message.syntax + return obj + }, + fromAminoMsg(object: FileDescriptorProtoAminoMsg): FileDescriptorProto { + return FileDescriptorProto.fromAmino(object.value) + }, + fromProtoMsg(message: FileDescriptorProtoProtoMsg): FileDescriptorProto { + return FileDescriptorProto.decode(message.value) + }, + toProto(message: FileDescriptorProto): Uint8Array { + return FileDescriptorProto.encode(message).finish() + }, + toProtoMsg(message: FileDescriptorProto): FileDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.FileDescriptorProto', + value: FileDescriptorProto.encode(message).finish() + } + } +} +function createBaseDescriptorProto(): DescriptorProto { + return { + name: '', + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [] + } +} +export const DescriptorProto = { + typeUrl: '/google.protobuf.DescriptorProto', + encode( + message: DescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim() + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim() + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim() + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim() + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode( + v!, + writer.uint32(42).fork() + ).ldelim() + } + for (const v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim() + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim() + } + for (const v of message.reservedRange) { + 
DescriptorProto_ReservedRange.encode( + v!, + writer.uint32(74).fork() + ).ldelim() + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!) + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 2: + message.field.push( + FieldDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 6: + message.extension.push( + FieldDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 3: + message.nestedType.push( + DescriptorProto.decode(reader, reader.uint32()) + ) + break + case 4: + message.enumType.push( + EnumDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 5: + message.extensionRange.push( + DescriptorProto_ExtensionRange.decode(reader, reader.uint32()) + ) + break + case 8: + message.oneofDecl.push( + OneofDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()) + break + case 9: + message.reservedRange.push( + DescriptorProto_ReservedRange.decode(reader, reader.uint32()) + ) + break + case 10: + message.reservedName.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): DescriptorProto { + const message = createBaseDescriptorProto() + message.name = object.name ?? 
'' + message.field = + object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || [] + message.extension = + object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || [] + message.nestedType = + object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || [] + message.enumType = + object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || [] + message.extensionRange = + object.extensionRange?.map((e) => + DescriptorProto_ExtensionRange.fromPartial(e) + ) || [] + message.oneofDecl = + object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || [] + message.options = + object.options !== undefined && object.options !== null + ? MessageOptions.fromPartial(object.options) + : undefined + message.reservedRange = + object.reservedRange?.map((e) => + DescriptorProto_ReservedRange.fromPartial(e) + ) || [] + message.reservedName = object.reservedName?.map((e) => e) || [] + return message + }, + fromAmino(object: DescriptorProtoAmino): DescriptorProto { + const message = createBaseDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + message.field = + object.field?.map((e) => FieldDescriptorProto.fromAmino(e)) || [] + message.extension = + object.extension?.map((e) => FieldDescriptorProto.fromAmino(e)) || [] + message.nestedType = + object.nested_type?.map((e) => DescriptorProto.fromAmino(e)) || [] + message.enumType = + object.enum_type?.map((e) => EnumDescriptorProto.fromAmino(e)) || [] + message.extensionRange = + object.extension_range?.map((e) => + DescriptorProto_ExtensionRange.fromAmino(e) + ) || [] + message.oneofDecl = + object.oneof_decl?.map((e) => OneofDescriptorProto.fromAmino(e)) || [] + if (object.options !== undefined && object.options !== null) { + message.options = MessageOptions.fromAmino(object.options) + } + message.reservedRange = + object.reserved_range?.map((e) => + DescriptorProto_ReservedRange.fromAmino(e) + ) || [] + message.reservedName = 
object.reserved_name?.map((e) => e) || [] + return message + }, + toAmino(message: DescriptorProto): DescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + if (message.field) { + obj.field = message.field.map((e) => + e ? FieldDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.field = message.field + } + if (message.extension) { + obj.extension = message.extension.map((e) => + e ? FieldDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.extension = message.extension + } + if (message.nestedType) { + obj.nested_type = message.nestedType.map((e) => + e ? DescriptorProto.toAmino(e) : undefined + ) + } else { + obj.nested_type = message.nestedType + } + if (message.enumType) { + obj.enum_type = message.enumType.map((e) => + e ? EnumDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.enum_type = message.enumType + } + if (message.extensionRange) { + obj.extension_range = message.extensionRange.map((e) => + e ? DescriptorProto_ExtensionRange.toAmino(e) : undefined + ) + } else { + obj.extension_range = message.extensionRange + } + if (message.oneofDecl) { + obj.oneof_decl = message.oneofDecl.map((e) => + e ? OneofDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.oneof_decl = message.oneofDecl + } + obj.options = message.options + ? MessageOptions.toAmino(message.options) + : undefined + if (message.reservedRange) { + obj.reserved_range = message.reservedRange.map((e) => + e ? 
DescriptorProto_ReservedRange.toAmino(e) : undefined + ) + } else { + obj.reserved_range = message.reservedRange + } + if (message.reservedName) { + obj.reserved_name = message.reservedName.map((e) => e) + } else { + obj.reserved_name = message.reservedName + } + return obj + }, + fromAminoMsg(object: DescriptorProtoAminoMsg): DescriptorProto { + return DescriptorProto.fromAmino(object.value) + }, + fromProtoMsg(message: DescriptorProtoProtoMsg): DescriptorProto { + return DescriptorProto.decode(message.value) + }, + toProto(message: DescriptorProto): Uint8Array { + return DescriptorProto.encode(message).finish() + }, + toProtoMsg(message: DescriptorProto): DescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.DescriptorProto', + value: DescriptorProto.encode(message).finish() + } + } +} +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { + start: 0, + end: 0, + options: undefined + } +} +export const DescriptorProto_ExtensionRange = { + typeUrl: '/google.protobuf.ExtensionRange', + encode( + message: DescriptorProto_ExtensionRange, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.start !== 0) { + writer.uint32(8).int32(message.start) + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end) + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode( + message.options, + writer.uint32(26).fork() + ).ldelim() + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): DescriptorProto_ExtensionRange { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseDescriptorProto_ExtensionRange() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.start = reader.int32() + break + case 2: + message.end = reader.int32() + break + case 3: + message.options = ExtensionRangeOptions.decode( + reader, + reader.uint32() + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial( + object: Partial<DescriptorProto_ExtensionRange> + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange() + message.start = object.start ?? 0 + message.end = object.end ?? 0 + message.options = + object.options !== undefined && object.options !== null + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined + return message + }, + fromAmino( + object: DescriptorProto_ExtensionRangeAmino + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange() + if (object.start !== undefined && object.start !== null) { + message.start = object.start + } + if (object.end !== undefined && object.end !== null) { + message.end = object.end + } + if (object.options !== undefined && object.options !== null) { + message.options = ExtensionRangeOptions.fromAmino(object.options) + } + return message + }, + toAmino( + message: DescriptorProto_ExtensionRange + ): DescriptorProto_ExtensionRangeAmino { + const obj: any = {} + obj.start = message.start === 0 ? undefined : message.start + obj.end = message.end === 0 ? undefined : message.end + obj.options = message.options + ? 
ExtensionRangeOptions.toAmino(message.options) + : undefined + return obj + }, + fromAminoMsg( + object: DescriptorProto_ExtensionRangeAminoMsg + ): DescriptorProto_ExtensionRange { + return DescriptorProto_ExtensionRange.fromAmino(object.value) + }, + fromProtoMsg( + message: DescriptorProto_ExtensionRangeProtoMsg + ): DescriptorProto_ExtensionRange { + return DescriptorProto_ExtensionRange.decode(message.value) + }, + toProto(message: DescriptorProto_ExtensionRange): Uint8Array { + return DescriptorProto_ExtensionRange.encode(message).finish() + }, + toProtoMsg( + message: DescriptorProto_ExtensionRange + ): DescriptorProto_ExtensionRangeProtoMsg { + return { + typeUrl: '/google.protobuf.ExtensionRange', + value: DescriptorProto_ExtensionRange.encode(message).finish() + } + } +} +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { + start: 0, + end: 0 + } +} +export const DescriptorProto_ReservedRange = { + typeUrl: '/google.protobuf.ReservedRange', + encode( + message: DescriptorProto_ReservedRange, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.start !== 0) { + writer.uint32(8).int32(message.start) + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): DescriptorProto_ReservedRange { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseDescriptorProto_ReservedRange() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.start = reader.int32() + break + case 2: + message.end = reader.int32() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial( + object: Partial<DescriptorProto_ReservedRange> + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange() + message.start = object.start ?? 0 + message.end = object.end ?? 0 + return message + }, + fromAmino( + object: DescriptorProto_ReservedRangeAmino + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange() + if (object.start !== undefined && object.start !== null) { + message.start = object.start + } + if (object.end !== undefined && object.end !== null) { + message.end = object.end + } + return message + }, + toAmino( + message: DescriptorProto_ReservedRange + ): DescriptorProto_ReservedRangeAmino { + const obj: any = {} + obj.start = message.start === 0 ? undefined : message.start + obj.end = message.end === 0 ? 
undefined : message.end + return obj + }, + fromAminoMsg( + object: DescriptorProto_ReservedRangeAminoMsg + ): DescriptorProto_ReservedRange { + return DescriptorProto_ReservedRange.fromAmino(object.value) + }, + fromProtoMsg( + message: DescriptorProto_ReservedRangeProtoMsg + ): DescriptorProto_ReservedRange { + return DescriptorProto_ReservedRange.decode(message.value) + }, + toProto(message: DescriptorProto_ReservedRange): Uint8Array { + return DescriptorProto_ReservedRange.encode(message).finish() + }, + toProtoMsg( + message: DescriptorProto_ReservedRange + ): DescriptorProto_ReservedRangeProtoMsg { + return { + typeUrl: '/google.protobuf.ReservedRange', + value: DescriptorProto_ReservedRange.encode(message).finish() + } + } +} +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { + uninterpretedOption: [] + } +} +export const ExtensionRangeOptions = { + typeUrl: '/google.protobuf.ExtensionRangeOptions', + encode( + message: ExtensionRangeOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): ExtensionRangeOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseExtensionRangeOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial<ExtensionRangeOptions>): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions() + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: ExtensionRangeOptionsAmino): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions() + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: ExtensionRangeOptions): ExtensionRangeOptionsAmino { + const obj: any = {} + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? 
UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: ExtensionRangeOptionsAminoMsg): ExtensionRangeOptions { + return ExtensionRangeOptions.fromAmino(object.value) + }, + fromProtoMsg(message: ExtensionRangeOptionsProtoMsg): ExtensionRangeOptions { + return ExtensionRangeOptions.decode(message.value) + }, + toProto(message: ExtensionRangeOptions): Uint8Array { + return ExtensionRangeOptions.encode(message).finish() + }, + toProtoMsg(message: ExtensionRangeOptions): ExtensionRangeOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.ExtensionRangeOptions', + value: ExtensionRangeOptions.encode(message).finish() + } + } +} +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: '', + number: 0, + label: 1, + type: 1, + typeName: '', + extendee: '', + defaultValue: '', + oneofIndex: 0, + jsonName: '', + options: undefined + } +} +export const FieldDescriptorProto = { + typeUrl: '/google.protobuf.FieldDescriptorProto', + encode( + message: FieldDescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number) + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label) + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type) + } + if (message.typeName !== '') { + writer.uint32(50).string(message.typeName) + } + if (message.extendee !== '') { + writer.uint32(18).string(message.extendee) + } + if (message.defaultValue !== '') { + writer.uint32(58).string(message.defaultValue) + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex) + } + if (message.jsonName !== '') { + writer.uint32(82).string(message.jsonName) + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, 
writer.uint32(66).fork()).ldelim() + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): FieldDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseFieldDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 3: + message.number = reader.int32() + break + case 4: + message.label = reader.int32() as any + break + case 5: + message.type = reader.int32() as any + break + case 6: + message.typeName = reader.string() + break + case 2: + message.extendee = reader.string() + break + case 7: + message.defaultValue = reader.string() + break + case 9: + message.oneofIndex = reader.int32() + break + case 10: + message.jsonName = reader.string() + break + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto() + message.name = object.name ?? '' + message.number = object.number ?? 0 + message.label = object.label ?? 1 + message.type = object.type ?? 1 + message.typeName = object.typeName ?? '' + message.extendee = object.extendee ?? '' + message.defaultValue = object.defaultValue ?? '' + message.oneofIndex = object.oneofIndex ?? 0 + message.jsonName = object.jsonName ?? '' + message.options = + object.options !== undefined && object.options !== null + ? 
FieldOptions.fromPartial(object.options) + : undefined + return message + }, + fromAmino(object: FieldDescriptorProtoAmino): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + if (object.number !== undefined && object.number !== null) { + message.number = object.number + } + if (object.label !== undefined && object.label !== null) { + message.label = object.label + } + if (object.type !== undefined && object.type !== null) { + message.type = object.type + } + if (object.type_name !== undefined && object.type_name !== null) { + message.typeName = object.type_name + } + if (object.extendee !== undefined && object.extendee !== null) { + message.extendee = object.extendee + } + if (object.default_value !== undefined && object.default_value !== null) { + message.defaultValue = object.default_value + } + if (object.oneof_index !== undefined && object.oneof_index !== null) { + message.oneofIndex = object.oneof_index + } + if (object.json_name !== undefined && object.json_name !== null) { + message.jsonName = object.json_name + } + if (object.options !== undefined && object.options !== null) { + message.options = FieldOptions.fromAmino(object.options) + } + return message + }, + toAmino(message: FieldDescriptorProto): FieldDescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + obj.number = message.number === 0 ? undefined : message.number + obj.label = message.label === 1 ? undefined : message.label + obj.type = message.type === 1 ? undefined : message.type + obj.type_name = message.typeName === '' ? undefined : message.typeName + obj.extendee = message.extendee === '' ? undefined : message.extendee + obj.default_value = + message.defaultValue === '' ? undefined : message.defaultValue + obj.oneof_index = message.oneofIndex === 0 ? undefined : message.oneofIndex + obj.json_name = message.jsonName === '' ? 
undefined : message.jsonName + obj.options = message.options + ? FieldOptions.toAmino(message.options) + : undefined + return obj + }, + fromAminoMsg(object: FieldDescriptorProtoAminoMsg): FieldDescriptorProto { + return FieldDescriptorProto.fromAmino(object.value) + }, + fromProtoMsg(message: FieldDescriptorProtoProtoMsg): FieldDescriptorProto { + return FieldDescriptorProto.decode(message.value) + }, + toProto(message: FieldDescriptorProto): Uint8Array { + return FieldDescriptorProto.encode(message).finish() + }, + toProtoMsg(message: FieldDescriptorProto): FieldDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.FieldDescriptorProto', + value: FieldDescriptorProto.encode(message).finish() + } + } +} +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { + name: '', + options: undefined + } +} +export const OneofDescriptorProto = { + typeUrl: '/google.protobuf.OneofDescriptorProto', + encode( + message: OneofDescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim() + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): OneofDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseOneofDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto() + message.name = object.name ?? 
'' + message.options = + object.options !== undefined && object.options !== null + ? OneofOptions.fromPartial(object.options) + : undefined + return message + }, + fromAmino(object: OneofDescriptorProtoAmino): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + if (object.options !== undefined && object.options !== null) { + message.options = OneofOptions.fromAmino(object.options) + } + return message + }, + toAmino(message: OneofDescriptorProto): OneofDescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + obj.options = message.options + ? OneofOptions.toAmino(message.options) + : undefined + return obj + }, + fromAminoMsg(object: OneofDescriptorProtoAminoMsg): OneofDescriptorProto { + return OneofDescriptorProto.fromAmino(object.value) + }, + fromProtoMsg(message: OneofDescriptorProtoProtoMsg): OneofDescriptorProto { + return OneofDescriptorProto.decode(message.value) + }, + toProto(message: OneofDescriptorProto): Uint8Array { + return OneofDescriptorProto.encode(message).finish() + }, + toProtoMsg(message: OneofDescriptorProto): OneofDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.OneofDescriptorProto', + value: OneofDescriptorProto.encode(message).finish() + } + } +} +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { + name: '', + value: [], + options: undefined, + reservedRange: [], + reservedName: [] + } +} +export const EnumDescriptorProto = { + typeUrl: '/google.protobuf.EnumDescriptorProto', + encode( + message: EnumDescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim() + } + if (message.options !== undefined) { + 
EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim() + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode( + v!, + writer.uint32(34).fork() + ).ldelim() + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): EnumDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseEnumDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 2: + message.value.push( + EnumValueDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()) + break + case 4: + message.reservedRange.push( + EnumDescriptorProto_EnumReservedRange.decode( + reader, + reader.uint32() + ) + ) + break + case 5: + message.reservedName.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto() + message.name = object.name ?? '' + message.value = + object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || [] + message.options = + object.options !== undefined && object.options !== null + ? 
EnumOptions.fromPartial(object.options) + : undefined + message.reservedRange = + object.reservedRange?.map((e) => + EnumDescriptorProto_EnumReservedRange.fromPartial(e) + ) || [] + message.reservedName = object.reservedName?.map((e) => e) || [] + return message + }, + fromAmino(object: EnumDescriptorProtoAmino): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + message.value = + object.value?.map((e) => EnumValueDescriptorProto.fromAmino(e)) || [] + if (object.options !== undefined && object.options !== null) { + message.options = EnumOptions.fromAmino(object.options) + } + message.reservedRange = + object.reserved_range?.map((e) => + EnumDescriptorProto_EnumReservedRange.fromAmino(e) + ) || [] + message.reservedName = object.reserved_name?.map((e) => e) || [] + return message + }, + toAmino(message: EnumDescriptorProto): EnumDescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + if (message.value) { + obj.value = message.value.map((e) => + e ? EnumValueDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.value = message.value + } + obj.options = message.options + ? EnumOptions.toAmino(message.options) + : undefined + if (message.reservedRange) { + obj.reserved_range = message.reservedRange.map((e) => + e ? 
EnumDescriptorProto_EnumReservedRange.toAmino(e) : undefined + ) + } else { + obj.reserved_range = message.reservedRange + } + if (message.reservedName) { + obj.reserved_name = message.reservedName.map((e) => e) + } else { + obj.reserved_name = message.reservedName + } + return obj + }, + fromAminoMsg(object: EnumDescriptorProtoAminoMsg): EnumDescriptorProto { + return EnumDescriptorProto.fromAmino(object.value) + }, + fromProtoMsg(message: EnumDescriptorProtoProtoMsg): EnumDescriptorProto { + return EnumDescriptorProto.decode(message.value) + }, + toProto(message: EnumDescriptorProto): Uint8Array { + return EnumDescriptorProto.encode(message).finish() + }, + toProtoMsg(message: EnumDescriptorProto): EnumDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.EnumDescriptorProto', + value: EnumDescriptorProto.encode(message).finish() + } + } +} +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { + start: 0, + end: 0 + } +} +export const EnumDescriptorProto_EnumReservedRange = { + typeUrl: '/google.protobuf.EnumReservedRange', + encode( + message: EnumDescriptorProto_EnumReservedRange, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.start !== 0) { + writer.uint32(8).int32(message.start) + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): EnumDescriptorProto_EnumReservedRange { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseEnumDescriptorProto_EnumReservedRange() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.start = reader.int32() + break + case 2: + message.end = reader.int32() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial( + object: Partial + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange() + message.start = object.start ?? 0 + message.end = object.end ?? 0 + return message + }, + fromAmino( + object: EnumDescriptorProto_EnumReservedRangeAmino + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange() + if (object.start !== undefined && object.start !== null) { + message.start = object.start + } + if (object.end !== undefined && object.end !== null) { + message.end = object.end + } + return message + }, + toAmino( + message: EnumDescriptorProto_EnumReservedRange + ): EnumDescriptorProto_EnumReservedRangeAmino { + const obj: any = {} + obj.start = message.start === 0 ? undefined : message.start + obj.end = message.end === 0 ? 
undefined : message.end + return obj + }, + fromAminoMsg( + object: EnumDescriptorProto_EnumReservedRangeAminoMsg + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.fromAmino(object.value) + }, + fromProtoMsg( + message: EnumDescriptorProto_EnumReservedRangeProtoMsg + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.decode(message.value) + }, + toProto(message: EnumDescriptorProto_EnumReservedRange): Uint8Array { + return EnumDescriptorProto_EnumReservedRange.encode(message).finish() + }, + toProtoMsg( + message: EnumDescriptorProto_EnumReservedRange + ): EnumDescriptorProto_EnumReservedRangeProtoMsg { + return { + typeUrl: '/google.protobuf.EnumReservedRange', + value: EnumDescriptorProto_EnumReservedRange.encode(message).finish() + } + } +} +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { + name: '', + number: 0, + options: undefined + } +} +export const EnumValueDescriptorProto = { + typeUrl: '/google.protobuf.EnumValueDescriptorProto', + encode( + message: EnumValueDescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number) + } + if (message.options !== undefined) { + EnumValueOptions.encode( + message.options, + writer.uint32(26).fork() + ).ldelim() + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): EnumValueDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseEnumValueDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 2: + message.number = reader.int32() + break + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial( + object: Partial + ): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto() + message.name = object.name ?? '' + message.number = object.number ?? 0 + message.options = + object.options !== undefined && object.options !== null + ? EnumValueOptions.fromPartial(object.options) + : undefined + return message + }, + fromAmino(object: EnumValueDescriptorProtoAmino): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + if (object.number !== undefined && object.number !== null) { + message.number = object.number + } + if (object.options !== undefined && object.options !== null) { + message.options = EnumValueOptions.fromAmino(object.options) + } + return message + }, + toAmino(message: EnumValueDescriptorProto): EnumValueDescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + obj.number = message.number === 0 ? undefined : message.number + obj.options = message.options + ? 
EnumValueOptions.toAmino(message.options) + : undefined + return obj + }, + fromAminoMsg( + object: EnumValueDescriptorProtoAminoMsg + ): EnumValueDescriptorProto { + return EnumValueDescriptorProto.fromAmino(object.value) + }, + fromProtoMsg( + message: EnumValueDescriptorProtoProtoMsg + ): EnumValueDescriptorProto { + return EnumValueDescriptorProto.decode(message.value) + }, + toProto(message: EnumValueDescriptorProto): Uint8Array { + return EnumValueDescriptorProto.encode(message).finish() + }, + toProtoMsg( + message: EnumValueDescriptorProto + ): EnumValueDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.EnumValueDescriptorProto', + value: EnumValueDescriptorProto.encode(message).finish() + } + } +} +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { + name: '', + method: [], + options: undefined + } +} +export const ServiceDescriptorProto = { + typeUrl: '/google.protobuf.ServiceDescriptorProto', + encode( + message: ServiceDescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim() + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim() + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): ServiceDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseServiceDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 2: + message.method.push( + MethodDescriptorProto.decode(reader, reader.uint32()) + ) + break + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto() + message.name = object.name ?? '' + message.method = + object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || [] + message.options = + object.options !== undefined && object.options !== null + ? ServiceOptions.fromPartial(object.options) + : undefined + return message + }, + fromAmino(object: ServiceDescriptorProtoAmino): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + message.method = + object.method?.map((e) => MethodDescriptorProto.fromAmino(e)) || [] + if (object.options !== undefined && object.options !== null) { + message.options = ServiceOptions.fromAmino(object.options) + } + return message + }, + toAmino(message: ServiceDescriptorProto): ServiceDescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + if (message.method) { + obj.method = message.method.map((e) => + e ? MethodDescriptorProto.toAmino(e) : undefined + ) + } else { + obj.method = message.method + } + obj.options = message.options + ? 
ServiceOptions.toAmino(message.options) + : undefined + return obj + }, + fromAminoMsg(object: ServiceDescriptorProtoAminoMsg): ServiceDescriptorProto { + return ServiceDescriptorProto.fromAmino(object.value) + }, + fromProtoMsg( + message: ServiceDescriptorProtoProtoMsg + ): ServiceDescriptorProto { + return ServiceDescriptorProto.decode(message.value) + }, + toProto(message: ServiceDescriptorProto): Uint8Array { + return ServiceDescriptorProto.encode(message).finish() + }, + toProtoMsg(message: ServiceDescriptorProto): ServiceDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.ServiceDescriptorProto', + value: ServiceDescriptorProto.encode(message).finish() + } + } +} +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: '', + inputType: '', + outputType: '', + options: undefined, + clientStreaming: false, + serverStreaming: false + } +} +export const MethodDescriptorProto = { + typeUrl: '/google.protobuf.MethodDescriptorProto', + encode( + message: MethodDescriptorProto, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name) + } + if (message.inputType !== '') { + writer.uint32(18).string(message.inputType) + } + if (message.outputType !== '') { + writer.uint32(26).string(message.outputType) + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim() + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming) + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): MethodDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseMethodDescriptorProto() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.name = reader.string() + break + case 2: + message.inputType = reader.string() + break + case 3: + message.outputType = reader.string() + break + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()) + break + case 5: + message.clientStreaming = reader.bool() + break + case 6: + message.serverStreaming = reader.bool() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto() + message.name = object.name ?? '' + message.inputType = object.inputType ?? '' + message.outputType = object.outputType ?? '' + message.options = + object.options !== undefined && object.options !== null + ? MethodOptions.fromPartial(object.options) + : undefined + message.clientStreaming = object.clientStreaming ?? false + message.serverStreaming = object.serverStreaming ?? 
false + return message + }, + fromAmino(object: MethodDescriptorProtoAmino): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto() + if (object.name !== undefined && object.name !== null) { + message.name = object.name + } + if (object.input_type !== undefined && object.input_type !== null) { + message.inputType = object.input_type + } + if (object.output_type !== undefined && object.output_type !== null) { + message.outputType = object.output_type + } + if (object.options !== undefined && object.options !== null) { + message.options = MethodOptions.fromAmino(object.options) + } + if ( + object.client_streaming !== undefined && + object.client_streaming !== null + ) { + message.clientStreaming = object.client_streaming + } + if ( + object.server_streaming !== undefined && + object.server_streaming !== null + ) { + message.serverStreaming = object.server_streaming + } + return message + }, + toAmino(message: MethodDescriptorProto): MethodDescriptorProtoAmino { + const obj: any = {} + obj.name = message.name === '' ? undefined : message.name + obj.input_type = message.inputType === '' ? undefined : message.inputType + obj.output_type = message.outputType === '' ? undefined : message.outputType + obj.options = message.options + ? MethodOptions.toAmino(message.options) + : undefined + obj.client_streaming = + message.clientStreaming === false ? undefined : message.clientStreaming + obj.server_streaming = + message.serverStreaming === false ? 
undefined : message.serverStreaming + return obj + }, + fromAminoMsg(object: MethodDescriptorProtoAminoMsg): MethodDescriptorProto { + return MethodDescriptorProto.fromAmino(object.value) + }, + fromProtoMsg(message: MethodDescriptorProtoProtoMsg): MethodDescriptorProto { + return MethodDescriptorProto.decode(message.value) + }, + toProto(message: MethodDescriptorProto): Uint8Array { + return MethodDescriptorProto.encode(message).finish() + }, + toProtoMsg(message: MethodDescriptorProto): MethodDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.MethodDescriptorProto', + value: MethodDescriptorProto.encode(message).finish() + } + } +} +function createBaseFileOptions(): FileOptions { + return { + javaPackage: '', + javaOuterClassname: '', + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: '', + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: '', + csharpNamespace: '', + swiftPrefix: '', + phpClassPrefix: '', + phpNamespace: '', + phpMetadataNamespace: '', + rubyPackage: '', + uninterpretedOption: [] + } +} +export const FileOptions = { + typeUrl: '/google.protobuf.FileOptions', + encode( + message: FileOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.javaPackage !== '') { + writer.uint32(10).string(message.javaPackage) + } + if (message.javaOuterClassname !== '') { + writer.uint32(66).string(message.javaOuterClassname) + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles) + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash) + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8) + } + if (message.optimizeFor !== 1) { + 
writer.uint32(72).int32(message.optimizeFor) + } + if (message.goPackage !== '') { + writer.uint32(90).string(message.goPackage) + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices) + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices) + } + if (message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices) + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices) + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated) + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas) + } + if (message.objcClassPrefix !== '') { + writer.uint32(290).string(message.objcClassPrefix) + } + if (message.csharpNamespace !== '') { + writer.uint32(298).string(message.csharpNamespace) + } + if (message.swiftPrefix !== '') { + writer.uint32(314).string(message.swiftPrefix) + } + if (message.phpClassPrefix !== '') { + writer.uint32(322).string(message.phpClassPrefix) + } + if (message.phpNamespace !== '') { + writer.uint32(330).string(message.phpNamespace) + } + if (message.phpMetadataNamespace !== '') { + writer.uint32(354).string(message.phpMetadataNamespace) + } + if (message.rubyPackage !== '') { + writer.uint32(362).string(message.rubyPackage) + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): FileOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseFileOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string() + break + case 8: + message.javaOuterClassname = reader.string() + break + case 10: + message.javaMultipleFiles = reader.bool() + break + case 20: + message.javaGenerateEqualsAndHash = reader.bool() + break + case 27: + message.javaStringCheckUtf8 = reader.bool() + break + case 9: + message.optimizeFor = reader.int32() as any + break + case 11: + message.goPackage = reader.string() + break + case 16: + message.ccGenericServices = reader.bool() + break + case 17: + message.javaGenericServices = reader.bool() + break + case 18: + message.pyGenericServices = reader.bool() + break + case 42: + message.phpGenericServices = reader.bool() + break + case 23: + message.deprecated = reader.bool() + break + case 31: + message.ccEnableArenas = reader.bool() + break + case 36: + message.objcClassPrefix = reader.string() + break + case 37: + message.csharpNamespace = reader.string() + break + case 39: + message.swiftPrefix = reader.string() + break + case 40: + message.phpClassPrefix = reader.string() + break + case 41: + message.phpNamespace = reader.string() + break + case 44: + message.phpMetadataNamespace = reader.string() + break + case 45: + message.rubyPackage = reader.string() + break + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): FileOptions { + const message = createBaseFileOptions() + message.javaPackage = object.javaPackage ?? '' + message.javaOuterClassname = object.javaOuterClassname ?? '' + message.javaMultipleFiles = object.javaMultipleFiles ?? false + message.javaGenerateEqualsAndHash = + object.javaGenerateEqualsAndHash ?? 
false + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false + message.optimizeFor = object.optimizeFor ?? 1 + message.goPackage = object.goPackage ?? '' + message.ccGenericServices = object.ccGenericServices ?? false + message.javaGenericServices = object.javaGenericServices ?? false + message.pyGenericServices = object.pyGenericServices ?? false + message.phpGenericServices = object.phpGenericServices ?? false + message.deprecated = object.deprecated ?? false + message.ccEnableArenas = object.ccEnableArenas ?? false + message.objcClassPrefix = object.objcClassPrefix ?? '' + message.csharpNamespace = object.csharpNamespace ?? '' + message.swiftPrefix = object.swiftPrefix ?? '' + message.phpClassPrefix = object.phpClassPrefix ?? '' + message.phpNamespace = object.phpNamespace ?? '' + message.phpMetadataNamespace = object.phpMetadataNamespace ?? '' + message.rubyPackage = object.rubyPackage ?? '' + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: FileOptionsAmino): FileOptions { + const message = createBaseFileOptions() + if (object.java_package !== undefined && object.java_package !== null) { + message.javaPackage = object.java_package + } + if ( + object.java_outer_classname !== undefined && + object.java_outer_classname !== null + ) { + message.javaOuterClassname = object.java_outer_classname + } + if ( + object.java_multiple_files !== undefined && + object.java_multiple_files !== null + ) { + message.javaMultipleFiles = object.java_multiple_files + } + if ( + object.java_generate_equals_and_hash !== undefined && + object.java_generate_equals_and_hash !== null + ) { + message.javaGenerateEqualsAndHash = object.java_generate_equals_and_hash + } + if ( + object.java_string_check_utf8 !== undefined && + object.java_string_check_utf8 !== null + ) { + message.javaStringCheckUtf8 = object.java_string_check_utf8 + } + if (object.optimize_for !== 
undefined && object.optimize_for !== null) { + message.optimizeFor = object.optimize_for + } + if (object.go_package !== undefined && object.go_package !== null) { + message.goPackage = object.go_package + } + if ( + object.cc_generic_services !== undefined && + object.cc_generic_services !== null + ) { + message.ccGenericServices = object.cc_generic_services + } + if ( + object.java_generic_services !== undefined && + object.java_generic_services !== null + ) { + message.javaGenericServices = object.java_generic_services + } + if ( + object.py_generic_services !== undefined && + object.py_generic_services !== null + ) { + message.pyGenericServices = object.py_generic_services + } + if ( + object.php_generic_services !== undefined && + object.php_generic_services !== null + ) { + message.phpGenericServices = object.php_generic_services + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated + } + if ( + object.cc_enable_arenas !== undefined && + object.cc_enable_arenas !== null + ) { + message.ccEnableArenas = object.cc_enable_arenas + } + if ( + object.objc_class_prefix !== undefined && + object.objc_class_prefix !== null + ) { + message.objcClassPrefix = object.objc_class_prefix + } + if ( + object.csharp_namespace !== undefined && + object.csharp_namespace !== null + ) { + message.csharpNamespace = object.csharp_namespace + } + if (object.swift_prefix !== undefined && object.swift_prefix !== null) { + message.swiftPrefix = object.swift_prefix + } + if ( + object.php_class_prefix !== undefined && + object.php_class_prefix !== null + ) { + message.phpClassPrefix = object.php_class_prefix + } + if (object.php_namespace !== undefined && object.php_namespace !== null) { + message.phpNamespace = object.php_namespace + } + if ( + object.php_metadata_namespace !== undefined && + object.php_metadata_namespace !== null + ) { + message.phpMetadataNamespace = object.php_metadata_namespace + } + if 
(object.ruby_package !== undefined && object.ruby_package !== null) { + message.rubyPackage = object.ruby_package + } + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: FileOptions): FileOptionsAmino { + const obj: any = {} + obj.java_package = + message.javaPackage === '' ? undefined : message.javaPackage + obj.java_outer_classname = + message.javaOuterClassname === '' ? undefined : message.javaOuterClassname + obj.java_multiple_files = + message.javaMultipleFiles === false + ? undefined + : message.javaMultipleFiles + obj.java_generate_equals_and_hash = + message.javaGenerateEqualsAndHash === false + ? undefined + : message.javaGenerateEqualsAndHash + obj.java_string_check_utf8 = + message.javaStringCheckUtf8 === false + ? undefined + : message.javaStringCheckUtf8 + obj.optimize_for = + message.optimizeFor === 1 ? undefined : message.optimizeFor + obj.go_package = message.goPackage === '' ? undefined : message.goPackage + obj.cc_generic_services = + message.ccGenericServices === false + ? undefined + : message.ccGenericServices + obj.java_generic_services = + message.javaGenericServices === false + ? undefined + : message.javaGenericServices + obj.py_generic_services = + message.pyGenericServices === false + ? undefined + : message.pyGenericServices + obj.php_generic_services = + message.phpGenericServices === false + ? undefined + : message.phpGenericServices + obj.deprecated = + message.deprecated === false ? undefined : message.deprecated + obj.cc_enable_arenas = + message.ccEnableArenas === false ? undefined : message.ccEnableArenas + obj.objc_class_prefix = + message.objcClassPrefix === '' ? undefined : message.objcClassPrefix + obj.csharp_namespace = + message.csharpNamespace === '' ? undefined : message.csharpNamespace + obj.swift_prefix = + message.swiftPrefix === '' ? 
undefined : message.swiftPrefix + obj.php_class_prefix = + message.phpClassPrefix === '' ? undefined : message.phpClassPrefix + obj.php_namespace = + message.phpNamespace === '' ? undefined : message.phpNamespace + obj.php_metadata_namespace = + message.phpMetadataNamespace === '' + ? undefined + : message.phpMetadataNamespace + obj.ruby_package = + message.rubyPackage === '' ? undefined : message.rubyPackage + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: FileOptionsAminoMsg): FileOptions { + return FileOptions.fromAmino(object.value) + }, + fromProtoMsg(message: FileOptionsProtoMsg): FileOptions { + return FileOptions.decode(message.value) + }, + toProto(message: FileOptions): Uint8Array { + return FileOptions.encode(message).finish() + }, + toProtoMsg(message: FileOptions): FileOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.FileOptions', + value: FileOptions.encode(message).finish() + } + } +} +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [] + } +} +export const MessageOptions = { + typeUrl: '/google.protobuf.MessageOptions', + encode( + message: MessageOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat) + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor) + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated) + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry) + } + for (const v of message.uninterpretedOption) { + 
UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): MessageOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseMessageOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool() + break + case 2: + message.noStandardDescriptorAccessor = reader.bool() + break + case 3: + message.deprecated = reader.bool() + break + case 7: + message.mapEntry = reader.bool() + break + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): MessageOptions { + const message = createBaseMessageOptions() + message.messageSetWireFormat = object.messageSetWireFormat ?? false + message.noStandardDescriptorAccessor = + object.noStandardDescriptorAccessor ?? false + message.deprecated = object.deprecated ?? false + message.mapEntry = object.mapEntry ?? 
false + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: MessageOptionsAmino): MessageOptions { + const message = createBaseMessageOptions() + if ( + object.message_set_wire_format !== undefined && + object.message_set_wire_format !== null + ) { + message.messageSetWireFormat = object.message_set_wire_format + } + if ( + object.no_standard_descriptor_accessor !== undefined && + object.no_standard_descriptor_accessor !== null + ) { + message.noStandardDescriptorAccessor = + object.no_standard_descriptor_accessor + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated + } + if (object.map_entry !== undefined && object.map_entry !== null) { + message.mapEntry = object.map_entry + } + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: MessageOptions): MessageOptionsAmino { + const obj: any = {} + obj.message_set_wire_format = + message.messageSetWireFormat === false + ? undefined + : message.messageSetWireFormat + obj.no_standard_descriptor_accessor = + message.noStandardDescriptorAccessor === false + ? undefined + : message.noStandardDescriptorAccessor + obj.deprecated = + message.deprecated === false ? undefined : message.deprecated + obj.map_entry = message.mapEntry === false ? undefined : message.mapEntry + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? 
UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: MessageOptionsAminoMsg): MessageOptions { + return MessageOptions.fromAmino(object.value) + }, + fromProtoMsg(message: MessageOptionsProtoMsg): MessageOptions { + return MessageOptions.decode(message.value) + }, + toProto(message: MessageOptions): Uint8Array { + return MessageOptions.encode(message).finish() + }, + toProtoMsg(message: MessageOptions): MessageOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.MessageOptions', + value: MessageOptions.encode(message).finish() + } + } +} +function createBaseFieldOptions(): FieldOptions { + return { + ctype: 1, + packed: false, + jstype: 1, + lazy: false, + deprecated: false, + weak: false, + uninterpretedOption: [] + } +} +export const FieldOptions = { + typeUrl: '/google.protobuf.FieldOptions', + encode( + message: FieldOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.ctype !== 1) { + writer.uint32(8).int32(message.ctype) + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed) + } + if (message.jstype !== 1) { + writer.uint32(48).int32(message.jstype) + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy) + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated) + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak) + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseFieldOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any + break + case 2: + message.packed = reader.bool() + break + case 6: + message.jstype = reader.int32() as any + break + case 5: + message.lazy = reader.bool() + break + case 3: + message.deprecated = reader.bool() + break + case 10: + message.weak = reader.bool() + break + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): FieldOptions { + const message = createBaseFieldOptions() + message.ctype = object.ctype ?? 1 + message.packed = object.packed ?? false + message.jstype = object.jstype ?? 1 + message.lazy = object.lazy ?? false + message.deprecated = object.deprecated ?? false + message.weak = object.weak ?? false + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: FieldOptionsAmino): FieldOptions { + const message = createBaseFieldOptions() + if (object.ctype !== undefined && object.ctype !== null) { + message.ctype = object.ctype + } + if (object.packed !== undefined && object.packed !== null) { + message.packed = object.packed + } + if (object.jstype !== undefined && object.jstype !== null) { + message.jstype = object.jstype + } + if (object.lazy !== undefined && object.lazy !== null) { + message.lazy = object.lazy + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated + } + if (object.weak !== undefined && object.weak !== null) { + message.weak = object.weak + } + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: 
FieldOptions): FieldOptionsAmino { + const obj: any = {} + obj.ctype = message.ctype === 1 ? undefined : message.ctype + obj.packed = message.packed === false ? undefined : message.packed + obj.jstype = message.jstype === 1 ? undefined : message.jstype + obj.lazy = message.lazy === false ? undefined : message.lazy + obj.deprecated = + message.deprecated === false ? undefined : message.deprecated + obj.weak = message.weak === false ? undefined : message.weak + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: FieldOptionsAminoMsg): FieldOptions { + return FieldOptions.fromAmino(object.value) + }, + fromProtoMsg(message: FieldOptionsProtoMsg): FieldOptions { + return FieldOptions.decode(message.value) + }, + toProto(message: FieldOptions): Uint8Array { + return FieldOptions.encode(message).finish() + }, + toProtoMsg(message: FieldOptions): FieldOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.FieldOptions', + value: FieldOptions.encode(message).finish() + } + } +} +function createBaseOneofOptions(): OneofOptions { + return { + uninterpretedOption: [] + } +} +export const OneofOptions = { + typeUrl: '/google.protobuf.OneofOptions', + encode( + message: OneofOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): OneofOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseOneofOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): OneofOptions { + const message = createBaseOneofOptions() + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: OneofOptionsAmino): OneofOptions { + const message = createBaseOneofOptions() + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: OneofOptions): OneofOptionsAmino { + const obj: any = {} + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? 
UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: OneofOptionsAminoMsg): OneofOptions { + return OneofOptions.fromAmino(object.value) + }, + fromProtoMsg(message: OneofOptionsProtoMsg): OneofOptions { + return OneofOptions.decode(message.value) + }, + toProto(message: OneofOptions): Uint8Array { + return OneofOptions.encode(message).finish() + }, + toProtoMsg(message: OneofOptions): OneofOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.OneofOptions', + value: OneofOptions.encode(message).finish() + } + } +} +function createBaseEnumOptions(): EnumOptions { + return { + allowAlias: false, + deprecated: false, + uninterpretedOption: [] + } +} +export const EnumOptions = { + typeUrl: '/google.protobuf.EnumOptions', + encode( + message: EnumOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias) + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated) + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): EnumOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseEnumOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool() + break + case 3: + message.deprecated = reader.bool() + break + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): EnumOptions { + const message = createBaseEnumOptions() + message.allowAlias = object.allowAlias ?? false + message.deprecated = object.deprecated ?? false + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: EnumOptionsAmino): EnumOptions { + const message = createBaseEnumOptions() + if (object.allow_alias !== undefined && object.allow_alias !== null) { + message.allowAlias = object.allow_alias + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated + } + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: EnumOptions): EnumOptionsAmino { + const obj: any = {} + obj.allow_alias = + message.allowAlias === false ? undefined : message.allowAlias + obj.deprecated = + message.deprecated === false ? undefined : message.deprecated + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? 
UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: EnumOptionsAminoMsg): EnumOptions { + return EnumOptions.fromAmino(object.value) + }, + fromProtoMsg(message: EnumOptionsProtoMsg): EnumOptions { + return EnumOptions.decode(message.value) + }, + toProto(message: EnumOptions): Uint8Array { + return EnumOptions.encode(message).finish() + }, + toProtoMsg(message: EnumOptions): EnumOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.EnumOptions', + value: EnumOptions.encode(message).finish() + } + } +} +function createBaseEnumValueOptions(): EnumValueOptions { + return { + deprecated: false, + uninterpretedOption: [] + } +} +export const EnumValueOptions = { + typeUrl: '/google.protobuf.EnumValueOptions', + encode( + message: EnumValueOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated) + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseEnumValueOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool() + break + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): EnumValueOptions { + const message = createBaseEnumValueOptions() + message.deprecated = object.deprecated ?? 
false + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: EnumValueOptionsAmino): EnumValueOptions { + const message = createBaseEnumValueOptions() + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated + } + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: EnumValueOptions): EnumValueOptionsAmino { + const obj: any = {} + obj.deprecated = + message.deprecated === false ? undefined : message.deprecated + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: EnumValueOptionsAminoMsg): EnumValueOptions { + return EnumValueOptions.fromAmino(object.value) + }, + fromProtoMsg(message: EnumValueOptionsProtoMsg): EnumValueOptions { + return EnumValueOptions.decode(message.value) + }, + toProto(message: EnumValueOptions): Uint8Array { + return EnumValueOptions.encode(message).finish() + }, + toProtoMsg(message: EnumValueOptions): EnumValueOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.EnumValueOptions', + value: EnumValueOptions.encode(message).finish() + } + } +} +function createBaseServiceOptions(): ServiceOptions { + return { + deprecated: false, + uninterpretedOption: [] + } +} +export const ServiceOptions = { + typeUrl: '/google.protobuf.ServiceOptions', + encode( + message: ServiceOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated) + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return 
writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): ServiceOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseServiceOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool() + break + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): ServiceOptions { + const message = createBaseServiceOptions() + message.deprecated = object.deprecated ?? false + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: ServiceOptionsAmino): ServiceOptions { + const message = createBaseServiceOptions() + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated + } + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: ServiceOptions): ServiceOptionsAmino { + const obj: any = {} + obj.deprecated = + message.deprecated === false ? undefined : message.deprecated + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? 
UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: ServiceOptionsAminoMsg): ServiceOptions { + return ServiceOptions.fromAmino(object.value) + }, + fromProtoMsg(message: ServiceOptionsProtoMsg): ServiceOptions { + return ServiceOptions.decode(message.value) + }, + toProto(message: ServiceOptions): Uint8Array { + return ServiceOptions.encode(message).finish() + }, + toProtoMsg(message: ServiceOptions): ServiceOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.ServiceOptions', + value: ServiceOptions.encode(message).finish() + } + } +} +function createBaseMethodOptions(): MethodOptions { + return { + deprecated: false, + idempotencyLevel: 1, + uninterpretedOption: [] + } +} +export const MethodOptions = { + typeUrl: '/google.protobuf.MethodOptions', + encode( + message: MethodOptions, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated) + } + if (message.idempotencyLevel !== 1) { + writer.uint32(272).int32(message.idempotencyLevel) + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): MethodOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseMethodOptions() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool() + break + case 34: + message.idempotencyLevel = reader.int32() as any + break + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): MethodOptions { + const message = createBaseMethodOptions() + message.deprecated = object.deprecated ?? false + message.idempotencyLevel = object.idempotencyLevel ?? 1 + message.uninterpretedOption = + object.uninterpretedOption?.map((e) => + UninterpretedOption.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: MethodOptionsAmino): MethodOptions { + const message = createBaseMethodOptions() + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated + } + if ( + object.idempotency_level !== undefined && + object.idempotency_level !== null + ) { + message.idempotencyLevel = object.idempotency_level + } + message.uninterpretedOption = + object.uninterpreted_option?.map((e) => + UninterpretedOption.fromAmino(e) + ) || [] + return message + }, + toAmino(message: MethodOptions): MethodOptionsAmino { + const obj: any = {} + obj.deprecated = + message.deprecated === false ? undefined : message.deprecated + obj.idempotency_level = + message.idempotencyLevel === 1 ? undefined : message.idempotencyLevel + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map((e) => + e ? 
UninterpretedOption.toAmino(e) : undefined + ) + } else { + obj.uninterpreted_option = message.uninterpretedOption + } + return obj + }, + fromAminoMsg(object: MethodOptionsAminoMsg): MethodOptions { + return MethodOptions.fromAmino(object.value) + }, + fromProtoMsg(message: MethodOptionsProtoMsg): MethodOptions { + return MethodOptions.decode(message.value) + }, + toProto(message: MethodOptions): Uint8Array { + return MethodOptions.encode(message).finish() + }, + toProtoMsg(message: MethodOptions): MethodOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.MethodOptions', + value: MethodOptions.encode(message).finish() + } + } +} +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: '', + positiveIntValue: BigInt(0), + negativeIntValue: BigInt(0), + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: '' + } +} +export const UninterpretedOption = { + typeUrl: '/google.protobuf.UninterpretedOption', + encode( + message: UninterpretedOption, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim() + } + if (message.identifierValue !== '') { + writer.uint32(26).string(message.identifierValue) + } + if (message.positiveIntValue !== BigInt(0)) { + writer.uint32(32).uint64(message.positiveIntValue) + } + if (message.negativeIntValue !== BigInt(0)) { + writer.uint32(40).int64(message.negativeIntValue) + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue) + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue) + } + if (message.aggregateValue !== '') { + writer.uint32(66).string(message.aggregateValue) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): UninterpretedOption { + const reader = + input instanceof BinaryReader ? 
input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseUninterpretedOption() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 2: + message.name.push( + UninterpretedOption_NamePart.decode(reader, reader.uint32()) + ) + break + case 3: + message.identifierValue = reader.string() + break + case 4: + message.positiveIntValue = reader.uint64() + break + case 5: + message.negativeIntValue = reader.int64() + break + case 6: + message.doubleValue = reader.double() + break + case 7: + message.stringValue = reader.bytes() + break + case 8: + message.aggregateValue = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): UninterpretedOption { + const message = createBaseUninterpretedOption() + message.name = + object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || [] + message.identifierValue = object.identifierValue ?? '' + message.positiveIntValue = + object.positiveIntValue !== undefined && object.positiveIntValue !== null + ? BigInt(object.positiveIntValue.toString()) + : BigInt(0) + message.negativeIntValue = + object.negativeIntValue !== undefined && object.negativeIntValue !== null + ? BigInt(object.negativeIntValue.toString()) + : BigInt(0) + message.doubleValue = object.doubleValue ?? 0 + message.stringValue = object.stringValue ?? new Uint8Array() + message.aggregateValue = object.aggregateValue ?? 
'' + return message + }, + fromAmino(object: UninterpretedOptionAmino): UninterpretedOption { + const message = createBaseUninterpretedOption() + message.name = + object.name?.map((e) => UninterpretedOption_NamePart.fromAmino(e)) || [] + if ( + object.identifier_value !== undefined && + object.identifier_value !== null + ) { + message.identifierValue = object.identifier_value + } + if ( + object.positive_int_value !== undefined && + object.positive_int_value !== null + ) { + message.positiveIntValue = BigInt(object.positive_int_value) + } + if ( + object.negative_int_value !== undefined && + object.negative_int_value !== null + ) { + message.negativeIntValue = BigInt(object.negative_int_value) + } + if (object.double_value !== undefined && object.double_value !== null) { + message.doubleValue = object.double_value + } + if (object.string_value !== undefined && object.string_value !== null) { + message.stringValue = bytesFromBase64(object.string_value) + } + if ( + object.aggregate_value !== undefined && + object.aggregate_value !== null + ) { + message.aggregateValue = object.aggregate_value + } + return message + }, + toAmino(message: UninterpretedOption): UninterpretedOptionAmino { + const obj: any = {} + if (message.name) { + obj.name = message.name.map((e) => + e ? UninterpretedOption_NamePart.toAmino(e) : undefined + ) + } else { + obj.name = message.name + } + obj.identifier_value = + message.identifierValue === '' ? undefined : message.identifierValue + obj.positive_int_value = + message.positiveIntValue !== BigInt(0) + ? message.positiveIntValue.toString() + : undefined + obj.negative_int_value = + message.negativeIntValue !== BigInt(0) + ? message.negativeIntValue.toString() + : undefined + obj.double_value = + message.doubleValue === 0 ? undefined : message.doubleValue + obj.string_value = message.stringValue + ? base64FromBytes(message.stringValue) + : undefined + obj.aggregate_value = + message.aggregateValue === '' ? 
undefined : message.aggregateValue + return obj + }, + fromAminoMsg(object: UninterpretedOptionAminoMsg): UninterpretedOption { + return UninterpretedOption.fromAmino(object.value) + }, + fromProtoMsg(message: UninterpretedOptionProtoMsg): UninterpretedOption { + return UninterpretedOption.decode(message.value) + }, + toProto(message: UninterpretedOption): Uint8Array { + return UninterpretedOption.encode(message).finish() + }, + toProtoMsg(message: UninterpretedOption): UninterpretedOptionProtoMsg { + return { + typeUrl: '/google.protobuf.UninterpretedOption', + value: UninterpretedOption.encode(message).finish() + } + } +} +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { + namePart: '', + isExtension: false + } +} +export const UninterpretedOption_NamePart = { + typeUrl: '/google.protobuf.NamePart', + encode( + message: UninterpretedOption_NamePart, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.namePart !== '') { + writer.uint32(10).string(message.namePart) + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): UninterpretedOption_NamePart { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseUninterpretedOption_NamePart() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.namePart = reader.string() + break + case 2: + message.isExtension = reader.bool() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial( + object: Partial + ): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart() + message.namePart = object.namePart ?? '' + message.isExtension = object.isExtension ?? 
false + return message + }, + fromAmino( + object: UninterpretedOption_NamePartAmino + ): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart() + if (object.name_part !== undefined && object.name_part !== null) { + message.namePart = object.name_part + } + if (object.is_extension !== undefined && object.is_extension !== null) { + message.isExtension = object.is_extension + } + return message + }, + toAmino( + message: UninterpretedOption_NamePart + ): UninterpretedOption_NamePartAmino { + const obj: any = {} + obj.name_part = message.namePart === '' ? undefined : message.namePart + obj.is_extension = + message.isExtension === false ? undefined : message.isExtension + return obj + }, + fromAminoMsg( + object: UninterpretedOption_NamePartAminoMsg + ): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.fromAmino(object.value) + }, + fromProtoMsg( + message: UninterpretedOption_NamePartProtoMsg + ): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.decode(message.value) + }, + toProto(message: UninterpretedOption_NamePart): Uint8Array { + return UninterpretedOption_NamePart.encode(message).finish() + }, + toProtoMsg( + message: UninterpretedOption_NamePart + ): UninterpretedOption_NamePartProtoMsg { + return { + typeUrl: '/google.protobuf.NamePart', + value: UninterpretedOption_NamePart.encode(message).finish() + } + } +} +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { + location: [] + } +} +export const SourceCodeInfo = { + typeUrl: '/google.protobuf.SourceCodeInfo', + encode( + message: SourceCodeInfo, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo { + const reader = + input instanceof BinaryReader ? 
input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseSourceCodeInfo() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.location.push( + SourceCodeInfo_Location.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): SourceCodeInfo { + const message = createBaseSourceCodeInfo() + message.location = + object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || [] + return message + }, + fromAmino(object: SourceCodeInfoAmino): SourceCodeInfo { + const message = createBaseSourceCodeInfo() + message.location = + object.location?.map((e) => SourceCodeInfo_Location.fromAmino(e)) || [] + return message + }, + toAmino(message: SourceCodeInfo): SourceCodeInfoAmino { + const obj: any = {} + if (message.location) { + obj.location = message.location.map((e) => + e ? SourceCodeInfo_Location.toAmino(e) : undefined + ) + } else { + obj.location = message.location + } + return obj + }, + fromAminoMsg(object: SourceCodeInfoAminoMsg): SourceCodeInfo { + return SourceCodeInfo.fromAmino(object.value) + }, + fromProtoMsg(message: SourceCodeInfoProtoMsg): SourceCodeInfo { + return SourceCodeInfo.decode(message.value) + }, + toProto(message: SourceCodeInfo): Uint8Array { + return SourceCodeInfo.encode(message).finish() + }, + toProtoMsg(message: SourceCodeInfo): SourceCodeInfoProtoMsg { + return { + typeUrl: '/google.protobuf.SourceCodeInfo', + value: SourceCodeInfo.encode(message).finish() + } + } +} +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { + path: [], + span: [], + leadingComments: '', + trailingComments: '', + leadingDetachedComments: [] + } +} +export const SourceCodeInfo_Location = { + typeUrl: '/google.protobuf.Location', + encode( + message: SourceCodeInfo_Location, + writer: BinaryWriter = 
BinaryWriter.create() + ): BinaryWriter { + writer.uint32(10).fork() + for (const v of message.path) { + writer.int32(v) + } + writer.ldelim() + writer.uint32(18).fork() + for (const v of message.span) { + writer.int32(v) + } + writer.ldelim() + if (message.leadingComments !== '') { + writer.uint32(26).string(message.leadingComments) + } + if (message.trailingComments !== '') { + writer.uint32(34).string(message.trailingComments) + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): SourceCodeInfo_Location { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseSourceCodeInfo_Location() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos + while (reader.pos < end2) { + message.path.push(reader.int32()) + } + } else { + message.path.push(reader.int32()) + } + break + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos + while (reader.pos < end2) { + message.span.push(reader.int32()) + } + } else { + message.span.push(reader.int32()) + } + break + case 3: + message.leadingComments = reader.string() + break + case 4: + message.trailingComments = reader.string() + break + case 6: + message.leadingDetachedComments.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial( + object: Partial + ): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location() + message.path = object.path?.map((e) => e) || [] + message.span = object.span?.map((e) => e) || [] + message.leadingComments = object.leadingComments ?? '' + message.trailingComments = object.trailingComments ?? 
'' + message.leadingDetachedComments = + object.leadingDetachedComments?.map((e) => e) || [] + return message + }, + fromAmino(object: SourceCodeInfo_LocationAmino): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location() + message.path = object.path?.map((e) => e) || [] + message.span = object.span?.map((e) => e) || [] + if ( + object.leading_comments !== undefined && + object.leading_comments !== null + ) { + message.leadingComments = object.leading_comments + } + if ( + object.trailing_comments !== undefined && + object.trailing_comments !== null + ) { + message.trailingComments = object.trailing_comments + } + message.leadingDetachedComments = + object.leading_detached_comments?.map((e) => e) || [] + return message + }, + toAmino(message: SourceCodeInfo_Location): SourceCodeInfo_LocationAmino { + const obj: any = {} + if (message.path) { + obj.path = message.path.map((e) => e) + } else { + obj.path = message.path + } + if (message.span) { + obj.span = message.span.map((e) => e) + } else { + obj.span = message.span + } + obj.leading_comments = + message.leadingComments === '' ? undefined : message.leadingComments + obj.trailing_comments = + message.trailingComments === '' ? 
undefined : message.trailingComments + if (message.leadingDetachedComments) { + obj.leading_detached_comments = message.leadingDetachedComments.map( + (e) => e + ) + } else { + obj.leading_detached_comments = message.leadingDetachedComments + } + return obj + }, + fromAminoMsg( + object: SourceCodeInfo_LocationAminoMsg + ): SourceCodeInfo_Location { + return SourceCodeInfo_Location.fromAmino(object.value) + }, + fromProtoMsg( + message: SourceCodeInfo_LocationProtoMsg + ): SourceCodeInfo_Location { + return SourceCodeInfo_Location.decode(message.value) + }, + toProto(message: SourceCodeInfo_Location): Uint8Array { + return SourceCodeInfo_Location.encode(message).finish() + }, + toProtoMsg( + message: SourceCodeInfo_Location + ): SourceCodeInfo_LocationProtoMsg { + return { + typeUrl: '/google.protobuf.Location', + value: SourceCodeInfo_Location.encode(message).finish() + } + } +} +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { + annotation: [] + } +} +export const GeneratedCodeInfo = { + typeUrl: '/google.protobuf.GeneratedCodeInfo', + encode( + message: GeneratedCodeInfo, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim() + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? 
reader.len : reader.pos + length + const message = createBaseGeneratedCodeInfo() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.annotation.push( + GeneratedCodeInfo_Annotation.decode(reader, reader.uint32()) + ) + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo() + message.annotation = + object.annotation?.map((e) => + GeneratedCodeInfo_Annotation.fromPartial(e) + ) || [] + return message + }, + fromAmino(object: GeneratedCodeInfoAmino): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo() + message.annotation = + object.annotation?.map((e) => + GeneratedCodeInfo_Annotation.fromAmino(e) + ) || [] + return message + }, + toAmino(message: GeneratedCodeInfo): GeneratedCodeInfoAmino { + const obj: any = {} + if (message.annotation) { + obj.annotation = message.annotation.map((e) => + e ? GeneratedCodeInfo_Annotation.toAmino(e) : undefined + ) + } else { + obj.annotation = message.annotation + } + return obj + }, + fromAminoMsg(object: GeneratedCodeInfoAminoMsg): GeneratedCodeInfo { + return GeneratedCodeInfo.fromAmino(object.value) + }, + fromProtoMsg(message: GeneratedCodeInfoProtoMsg): GeneratedCodeInfo { + return GeneratedCodeInfo.decode(message.value) + }, + toProto(message: GeneratedCodeInfo): Uint8Array { + return GeneratedCodeInfo.encode(message).finish() + }, + toProtoMsg(message: GeneratedCodeInfo): GeneratedCodeInfoProtoMsg { + return { + typeUrl: '/google.protobuf.GeneratedCodeInfo', + value: GeneratedCodeInfo.encode(message).finish() + } + } +} +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { + path: [], + sourceFile: '', + begin: 0, + end: 0 + } +} +export const GeneratedCodeInfo_Annotation = { + typeUrl: '/google.protobuf.Annotation', + encode( + message: GeneratedCodeInfo_Annotation, + writer: BinaryWriter = 
BinaryWriter.create() + ): BinaryWriter { + writer.uint32(10).fork() + for (const v of message.path) { + writer.int32(v) + } + writer.ldelim() + if (message.sourceFile !== '') { + writer.uint32(18).string(message.sourceFile) + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin) + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end) + } + return writer + }, + decode( + input: BinaryReader | Uint8Array, + length?: number + ): GeneratedCodeInfo_Annotation { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseGeneratedCodeInfo_Annotation() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos + while (reader.pos < end2) { + message.path.push(reader.int32()) + } + } else { + message.path.push(reader.int32()) + } + break + case 2: + message.sourceFile = reader.string() + break + case 3: + message.begin = reader.int32() + break + case 4: + message.end = reader.int32() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial( + object: Partial + ): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation() + message.path = object.path?.map((e) => e) || [] + message.sourceFile = object.sourceFile ?? '' + message.begin = object.begin ?? 0 + message.end = object.end ?? 
0 + return message + }, + fromAmino( + object: GeneratedCodeInfo_AnnotationAmino + ): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation() + message.path = object.path?.map((e) => e) || [] + if (object.source_file !== undefined && object.source_file !== null) { + message.sourceFile = object.source_file + } + if (object.begin !== undefined && object.begin !== null) { + message.begin = object.begin + } + if (object.end !== undefined && object.end !== null) { + message.end = object.end + } + return message + }, + toAmino( + message: GeneratedCodeInfo_Annotation + ): GeneratedCodeInfo_AnnotationAmino { + const obj: any = {} + if (message.path) { + obj.path = message.path.map((e) => e) + } else { + obj.path = message.path + } + obj.source_file = message.sourceFile === '' ? undefined : message.sourceFile + obj.begin = message.begin === 0 ? undefined : message.begin + obj.end = message.end === 0 ? undefined : message.end + return obj + }, + fromAminoMsg( + object: GeneratedCodeInfo_AnnotationAminoMsg + ): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.fromAmino(object.value) + }, + fromProtoMsg( + message: GeneratedCodeInfo_AnnotationProtoMsg + ): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.decode(message.value) + }, + toProto(message: GeneratedCodeInfo_Annotation): Uint8Array { + return GeneratedCodeInfo_Annotation.encode(message).finish() + }, + toProtoMsg( + message: GeneratedCodeInfo_Annotation + ): GeneratedCodeInfo_AnnotationProtoMsg { + return { + typeUrl: '/google.protobuf.Annotation', + value: GeneratedCodeInfo_Annotation.encode(message).finish() + } + } +} diff --git a/src/proto/google/protobuf/duration.ts b/src/proto/google/protobuf/duration.ts new file mode 100644 index 0000000..00a94ca --- /dev/null +++ b/src/proto/google/protobuf/duration.ts @@ -0,0 +1,291 @@ +/* eslint-disable prefer-const */ +import { BinaryReader, BinaryWriter } from '../../binary' +/** + * A Duration 
represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (durations.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. 
For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: bigint + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. + */ + nanos: number +} +export interface DurationProtoMsg { + typeUrl: '/google.protobuf.Duration' + value: Uint8Array +} +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. 
+ * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (durations.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export type DurationAmino = string +export interface DurationAminoMsg { + type: '/google.protobuf.Duration' + value: DurationAmino +} +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". 
It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (durations.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". 
+ */ +export interface DurationSDKType { + seconds: bigint + nanos: number +} +function createBaseDuration(): Duration { + return { + seconds: BigInt(0), + nanos: 0 + } +} +export const Duration = { + typeUrl: '/google.protobuf.Duration', + encode( + message: Duration, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.seconds !== BigInt(0)) { + writer.uint32(8).int64(message.seconds) + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos) + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): Duration { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseDuration() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.seconds = reader.int64() + break + case 2: + message.nanos = reader.int32() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): Duration { + const message = createBaseDuration() + message.seconds = + object.seconds !== undefined && object.seconds !== null + ? BigInt(object.seconds.toString()) + : BigInt(0) + message.nanos = object.nanos ?? 
0 + return message + }, + fromAmino(object: DurationAmino): Duration { + const value = BigInt(object) + return { + seconds: value / BigInt('1000000000'), + nanos: Number(value % BigInt('1000000000')) + } + }, + toAmino(message: Duration): DurationAmino { + return ( + message.seconds * BigInt('1000000000') + + BigInt(message.nanos) + ).toString() + }, + fromAminoMsg(object: DurationAminoMsg): Duration { + return Duration.fromAmino(object.value) + }, + fromProtoMsg(message: DurationProtoMsg): Duration { + return Duration.decode(message.value) + }, + toProto(message: Duration): Uint8Array { + return Duration.encode(message).finish() + }, + toProtoMsg(message: Duration): DurationProtoMsg { + return { + typeUrl: '/google.protobuf.Duration', + value: Duration.encode(message).finish() + } + } +} diff --git a/src/proto/google/protobuf/timestamp.ts b/src/proto/google/protobuf/timestamp.ts new file mode 100644 index 0000000..415d8d8 --- /dev/null +++ b/src/proto/google/protobuf/timestamp.ts @@ -0,0 +1,357 @@ +/* eslint-disable prefer-const */ +import { BinaryReader, BinaryWriter } from '../../binary' +import { fromJsonTimestamp, fromTimestamp } from '../../helpers' +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. 
+ * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. 
A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: bigint + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + nanos: number +} +export interface TimestampProtoMsg { + typeUrl: '/google.protobuf.Timestamp' + value: Uint8Array +} +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. 
+ * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. 
That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export type TimestampAmino = string +export interface TimestampAminoMsg { + type: '/google.protobuf.Timestamp' + value: TimestampAmino +} +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. 
Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. 
That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. 
+ */ +export interface TimestampSDKType { + seconds: bigint + nanos: number +} +function createBaseTimestamp(): Timestamp { + return { + seconds: BigInt(0), + nanos: 0 + } +} +export const Timestamp = { + typeUrl: '/google.protobuf.Timestamp', + encode( + message: Timestamp, + writer: BinaryWriter = BinaryWriter.create() + ): BinaryWriter { + if (message.seconds !== BigInt(0)) { + writer.uint32(8).int64(message.seconds) + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos) + } + return writer + }, + decode(input: BinaryReader | Uint8Array, length?: number): Timestamp { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input) + let end = length === undefined ? reader.len : reader.pos + length + const message = createBaseTimestamp() + while (reader.pos < end) { + const tag = reader.uint32() + switch (tag >>> 3) { + case 1: + message.seconds = reader.int64() + break + case 2: + message.nanos = reader.int32() + break + default: + reader.skipType(tag & 7) + break + } + } + return message + }, + fromPartial(object: Partial): Timestamp { + const message = createBaseTimestamp() + message.seconds = + object.seconds !== undefined && object.seconds !== null + ? BigInt(object.seconds.toString()) + : BigInt(0) + message.nanos = object.nanos ?? 
0 + return message + }, + fromAmino(object: TimestampAmino): Timestamp { + return fromJsonTimestamp(object) + }, + toAmino(message: Timestamp): TimestampAmino { + return fromTimestamp(message) + .toISOString() + .replace(/\.\d+Z$/, 'Z') + }, + fromAminoMsg(object: TimestampAminoMsg): Timestamp { + return Timestamp.fromAmino(object.value) + }, + fromProtoMsg(message: TimestampProtoMsg): Timestamp { + return Timestamp.decode(message.value) + }, + toProto(message: Timestamp): Uint8Array { + return Timestamp.encode(message).finish() + }, + toProtoMsg(message: Timestamp): TimestampProtoMsg { + return { + typeUrl: '/google.protobuf.Timestamp', + value: Timestamp.encode(message).finish() + } + } +} diff --git a/src/proto/osmojs/cosmos/auth/v1beta1/auth.ts b/src/proto/osmojs/cosmos/auth/v1beta1/auth.ts index 4c0db10..d539e7a 100644 --- a/src/proto/osmojs/cosmos/auth/v1beta1/auth.ts +++ b/src/proto/osmojs/cosmos/auth/v1beta1/auth.ts @@ -1,7 +1,7 @@ /* eslint-disable prefer-const */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino, AnySDKType } from '../../../../google/protobuf/any' import { BinaryReader, BinaryWriter } from '../../../../binary' import { GlobalDecoderRegistry } from '../../../registry' import { bytesFromBase64, base64FromBytes } from '../../../../helpers' diff --git a/src/proto/osmojs/cosmos/authz/v1beta1/authz.ts b/src/proto/osmojs/cosmos/authz/v1beta1/authz.ts index 3ef96da..7398adc 100644 --- a/src/proto/osmojs/cosmos/authz/v1beta1/authz.ts +++ b/src/proto/osmojs/cosmos/authz/v1beta1/authz.ts @@ -7,8 +7,8 @@ import { AnyProtoMsg, AnyAmino, AnySDKType -} from 'cosmjs-types/google/protobuf/any' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +} from '../../../../google/protobuf/any' +import { Timestamp } from '../../../../google/protobuf/timestamp' import { TransferAuthorization, TransferAuthorizationProtoMsg, diff 
--git a/src/proto/osmojs/cosmos/authz/v1beta1/tx.ts b/src/proto/osmojs/cosmos/authz/v1beta1/tx.ts index 4c42293..4a073f9 100644 --- a/src/proto/osmojs/cosmos/authz/v1beta1/tx.ts +++ b/src/proto/osmojs/cosmos/authz/v1beta1/tx.ts @@ -8,7 +8,7 @@ import { AnyProtoMsg, AnyAmino, AnySDKType -} from 'cosmjs-types/google/protobuf/any' +} from '../../../../google/protobuf/any' import { BinaryReader, BinaryWriter } from '../../../../binary' import { GlobalDecoderRegistry } from '../../../registry' import { bytesFromBase64, base64FromBytes } from '../../../../helpers' diff --git a/src/proto/osmojs/cosmos/bundle.ts b/src/proto/osmojs/cosmos/bundle.ts index c7cb623..870de69 100644 --- a/src/proto/osmojs/cosmos/bundle.ts +++ b/src/proto/osmojs/cosmos/bundle.ts @@ -1,78 +1,27 @@ /* eslint-disable @typescript-eslint/no-namespace */ /* eslint-disable @typescript-eslint/ban-ts-comment */ -//@ts-nocheck + import * as _0 from './ics23/v1/proofs' -import * as _1 from './app/runtime/v1alpha1/module' -import * as _2 from './auth/module/v1/module' import * as _3 from './auth/v1beta1/auth' -import * as _4 from './auth/v1beta1/genesis' -import * as _5 from './auth/v1beta1/query' import * as _6 from './auth/v1beta1/tx' -import * as _7 from './authz/module/v1/module' import * as _8 from './authz/v1beta1/authz' -import * as _9 from './authz/v1beta1/event' -import * as _10 from './authz/v1beta1/genesis' -import * as _11 from './authz/v1beta1/query' import * as _12 from './authz/v1beta1/tx' -import * as _13 from './bank/module/v1/module' import * as _14 from './bank/v1beta1/authz' import * as _15 from './bank/v1beta1/bank' -import * as _16 from './bank/v1beta1/genesis' -import * as _17 from './bank/v1beta1/query' import * as _18 from './bank/v1beta1/tx' -import * as _19 from './base/abci/v1beta1/abci' -import * as _20 from './base/node/v1beta1/query' -import * as _21 from './base/query/v1beta1/pagination' -import * as _22 from './base/reflection/v2alpha1/reflection' import * as _23 from 
'./base/v1beta1/coin' -import * as _24 from './capability/module/v1/module' -import * as _25 from './consensus/module/v1/module' -import * as _26 from './consensus/v1/query' import * as _27 from './consensus/v1/tx' -import * as _28 from './crisis/module/v1/module' -import * as _29 from './crypto/ed25519/keys' -import * as _30 from './crypto/hd/v1/hd' -import * as _31 from './crypto/keyring/v1/record' -import * as _32 from './crypto/multisig/keys' -import * as _33 from './crypto/secp256k1/keys' -import * as _34 from './crypto/secp256r1/keys' -import * as _35 from './distribution/module/v1/module' import * as _36 from './distribution/v1beta1/distribution' -import * as _37 from './distribution/v1beta1/genesis' -import * as _38 from './distribution/v1beta1/query' import * as _39 from './distribution/v1beta1/tx' -import * as _40 from './evidence/module/v1/module' -import * as _41 from './feegrant/module/v1/module' -import * as _42 from './genutil/module/v1/module' -import * as _43 from './gov/module/v1/module' -import * as _44 from './gov/v1beta1/genesis' import * as _45 from './gov/v1beta1/gov' -import * as _46 from './gov/v1beta1/query' import * as _47 from './gov/v1beta1/tx' -import * as _48 from './group/module/v1/module' -import * as _49 from './mint/module/v1/module' -import * as _50 from './nft/module/v1/module' -import * as _51 from './orm/module/v1alpha1/module' -import * as _52 from './orm/query/v1alpha1/query' -import * as _53 from './params/module/v1/module' -import * as _54 from './query/v1/query' -import * as _55 from './reflection/v1/reflection' -import * as _56 from './slashing/module/v1/module' -import * as _57 from './staking/module/v1/module' import * as _58 from './staking/v1beta1/authz' -import * as _59 from './staking/v1beta1/genesis' -import * as _60 from './staking/v1beta1/query' import * as _61 from './staking/v1beta1/staking' import * as _62 from './staking/v1beta1/tx' -import * as _63 from './tx/config/v1/config' import * as _64 from 
'./tx/signing/v1beta1/signing' -import * as _65 from './tx/v1beta1/service' import * as _66 from './tx/v1beta1/tx' -import * as _67 from './upgrade/module/v1/module' -import * as _68 from './upgrade/v1beta1/query' import * as _69 from './upgrade/v1beta1/tx' import * as _70 from './upgrade/v1beta1/upgrade' -import * as _71 from './vesting/module/v1/module' import * as _240 from './auth/v1beta1/tx.amino' import * as _241 from './authz/v1beta1/tx.amino' import * as _242 from './bank/v1beta1/tx.amino' @@ -89,367 +38,91 @@ import * as _252 from './distribution/v1beta1/tx.registry' import * as _253 from './gov/v1beta1/tx.registry' import * as _254 from './staking/v1beta1/tx.registry' import * as _255 from './upgrade/v1beta1/tx.registry' -import * as _256 from './auth/v1beta1/query.lcd' -import * as _257 from './authz/v1beta1/query.lcd' -import * as _258 from './bank/v1beta1/query.lcd' -import * as _259 from './base/node/v1beta1/query.lcd' -import * as _260 from './consensus/v1/query.lcd' -import * as _261 from './distribution/v1beta1/query.lcd' -import * as _262 from './gov/v1beta1/query.lcd' -import * as _263 from './staking/v1beta1/query.lcd' -import * as _264 from './tx/v1beta1/service.lcd' -import * as _265 from './upgrade/v1beta1/query.lcd' -import * as _266 from './auth/v1beta1/query.rpc.Query' -import * as _267 from './authz/v1beta1/query.rpc.Query' -import * as _268 from './bank/v1beta1/query.rpc.Query' -import * as _269 from './base/node/v1beta1/query.rpc.Service' -import * as _270 from './consensus/v1/query.rpc.Query' -import * as _271 from './distribution/v1beta1/query.rpc.Query' -import * as _272 from './gov/v1beta1/query.rpc.Query' -import * as _273 from './orm/query/v1alpha1/query.rpc.Query' -import * as _274 from './staking/v1beta1/query.rpc.Query' -import * as _275 from './tx/v1beta1/service.rpc.Service' -import * as _276 from './upgrade/v1beta1/query.rpc.Query' -import * as _277 from './auth/v1beta1/tx.rpc.msg' -import * as _278 from 
'./authz/v1beta1/tx.rpc.msg' -import * as _279 from './bank/v1beta1/tx.rpc.msg' -import * as _280 from './consensus/v1/tx.rpc.msg' -import * as _281 from './distribution/v1beta1/tx.rpc.msg' -import * as _282 from './gov/v1beta1/tx.rpc.msg' -import * as _283 from './staking/v1beta1/tx.rpc.msg' -import * as _284 from './upgrade/v1beta1/tx.rpc.msg' -import * as _415 from './lcd' -import * as _416 from './rpc.query' -import * as _417 from './rpc.tx' export namespace cosmos { export namespace ics23 { export const v1 = { ..._0 } } - export namespace app { - export namespace runtime { - export const v1alpha1 = { - ..._1 - } - } - } export namespace auth { - export namespace module { - export const v1 = { - ..._2 - } - } export const v1beta1 = { ..._3, - ..._4, - ..._5, ..._6, ..._240, - ..._248, - ..._256, - ..._266, - ..._277 + ..._248 } } export namespace authz { - export namespace module { - export const v1 = { - ..._7 - } - } export const v1beta1 = { ..._8, - ..._9, - ..._10, - ..._11, ..._12, ..._241, - ..._249, - ..._257, - ..._267, - ..._278 + ..._249 } } export namespace bank { - export namespace module { - export const v1 = { - ..._13 - } - } export const v1beta1 = { ..._14, ..._15, - ..._16, - ..._17, ..._18, ..._242, - ..._250, - ..._258, - ..._268, - ..._279 + ..._250 } } export namespace base { - export namespace abci { - export const v1beta1 = { - ..._19 - } - } - export namespace node { - export const v1beta1 = { - ..._20, - ..._259, - ..._269 - } - } - export namespace query { - export const v1beta1 = { - ..._21 - } - } - export namespace reflection { - export const v2alpha1 = { - ..._22 - } - } export const v1beta1 = { ..._23 } } - export namespace capability { - export namespace module { - export const v1 = { - ..._24 - } - } - } export namespace consensus { - export namespace module { - export const v1 = { - ..._25 - } - } export const v1 = { - ..._26, ..._27, ..._243, - ..._251, - ..._260, - ..._270, - ..._280 - } - } - export namespace crisis { - 
export namespace module { - export const v1 = { - ..._28 - } - } - } - export namespace crypto { - export const ed25519 = { - ..._29 - } - export namespace hd { - export const v1 = { - ..._30 - } - } - export namespace keyring { - export const v1 = { - ..._31 - } - } - export const multisig = { - ..._32 - } - export const secp256k1 = { - ..._33 - } - export const secp256r1 = { - ..._34 + ..._251 } } + export namespace distribution { - export namespace module { - export const v1 = { - ..._35 - } - } export const v1beta1 = { ..._36, - ..._37, - ..._38, ..._39, ..._244, - ..._252, - ..._261, - ..._271, - ..._281 - } - } - export namespace evidence { - export namespace module { - export const v1 = { - ..._40 - } - } - } - export namespace feegrant { - export namespace module { - export const v1 = { - ..._41 - } - } - } - export namespace genutil { - export namespace module { - export const v1 = { - ..._42 - } + ..._252 } } export namespace gov { - export namespace module { - export const v1 = { - ..._43 - } - } export const v1beta1 = { - ..._44, ..._45, - ..._46, ..._47, ..._245, - ..._253, - ..._262, - ..._272, - ..._282 - } - } - export namespace group { - export namespace module { - export const v1 = { - ..._48 - } - } - } - export namespace mint { - export namespace module { - export const v1 = { - ..._49 - } - } - } - export namespace nft { - export namespace module { - export const v1 = { - ..._50 - } - } - } - export namespace orm { - export namespace module { - export const v1alpha1 = { - ..._51 - } - } - export namespace query { - export const v1alpha1 = { - ..._52, - ..._273 - } - } - } - export namespace params { - export namespace module { - export const v1 = { - ..._53 - } - } - } - export namespace query { - export const v1 = { - ..._54 - } - } - export namespace reflection { - export const v1 = { - ..._55 - } - } - export namespace slashing { - export namespace module { - export const v1 = { - ..._56 - } + ..._253 } } export namespace staking { - export 
namespace module { - export const v1 = { - ..._57 - } - } export const v1beta1 = { ..._58, - ..._59, - ..._60, ..._61, ..._62, ..._246, - ..._254, - ..._263, - ..._274, - ..._283 - } - } - export namespace tx { - export namespace config { - export const v1 = { - ..._63 - } - } - export namespace signing { - export const v1beta1 = { - ..._64 - } - } - export const v1beta1 = { - ..._65, - ..._66, - ..._264, - ..._275 + ..._254 } } - export namespace upgrade { - export namespace module { - export const v1 = { - ..._67 - } - } +} +export namespace tx { + export namespace signing { export const v1beta1 = { - ..._68, - ..._69, - ..._70, - ..._247, - ..._255, - ..._265, - ..._276, - ..._284 + ..._64 } } - export namespace vesting { - export namespace module { - export const v1 = { - ..._71 - } - } + export const v1beta1 = { + ..._66 } - export const ClientFactory = { - ..._415, - ..._416, - ..._417 +} +export namespace upgrade { + export const v1beta1 = { + ..._69, + ..._70, + ..._247, + ..._255 } } diff --git a/src/proto/osmojs/cosmos/gov/v1beta1/gov.ts b/src/proto/osmojs/cosmos/gov/v1beta1/gov.ts index 539cd2e..f1972b1 100644 --- a/src/proto/osmojs/cosmos/gov/v1beta1/gov.ts +++ b/src/proto/osmojs/cosmos/gov/v1beta1/gov.ts @@ -8,13 +8,13 @@ import { AnyProtoMsg, AnyAmino, AnySDKType -} from 'cosmjs-types/google/protobuf/any' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +} from '../../../../google/protobuf/any' +import { Timestamp } from '../../../../google/protobuf/timestamp' import { Duration, DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' +} from '../../../../google/protobuf/duration' import { ClientUpdateProposal, ClientUpdateProposalProtoMsg, diff --git a/src/proto/osmojs/cosmos/gov/v1beta1/tx.ts b/src/proto/osmojs/cosmos/gov/v1beta1/tx.ts index 480ed23..5cbd50f 100644 --- a/src/proto/osmojs/cosmos/gov/v1beta1/tx.ts +++ b/src/proto/osmojs/cosmos/gov/v1beta1/tx.ts @@ -7,7 +7,7 @@ import { AnyProtoMsg, AnyAmino, 
AnySDKType -} from 'cosmjs-types/google/protobuf/any' +} from '../../../../google/protobuf/any' import { Coin, CoinAmino, CoinSDKType } from '../../base/v1beta1/coin' import { VoteOption, diff --git a/src/proto/osmojs/cosmos/staking/v1beta1/staking.ts b/src/proto/osmojs/cosmos/staking/v1beta1/staking.ts index 2bb3db0..cec7ef7 100644 --- a/src/proto/osmojs/cosmos/staking/v1beta1/staking.ts +++ b/src/proto/osmojs/cosmos/staking/v1beta1/staking.ts @@ -7,18 +7,18 @@ import { HeaderAmino, HeaderSDKType } from '../../../tendermint/types/types' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../../google/protobuf/timestamp' import { Any, AnyProtoMsg, AnyAmino, AnySDKType -} from 'cosmjs-types/google/protobuf/any' +} from '../../../../google/protobuf/any' import { Duration, DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' +} from '../../../../google/protobuf/duration' import { Coin, CoinAmino, CoinSDKType } from '../../base/v1beta1/coin' import { ValidatorUpdate, diff --git a/src/proto/osmojs/cosmos/staking/v1beta1/tx.ts b/src/proto/osmojs/cosmos/staking/v1beta1/tx.ts index 080875c..a4c1fa2 100644 --- a/src/proto/osmojs/cosmos/staking/v1beta1/tx.ts +++ b/src/proto/osmojs/cosmos/staking/v1beta1/tx.ts @@ -18,9 +18,9 @@ import { AnyProtoMsg, AnyAmino, AnySDKType -} from 'cosmjs-types/google/protobuf/any' +} from '../../../../google/protobuf/any' import { Coin, CoinAmino, CoinSDKType } from '../../base/v1beta1/coin' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../../google/protobuf/timestamp' import { BinaryReader, BinaryWriter } from '../../../../binary' import { encodePubkey, decodePubkey } from '@cosmjs/proto-signing' import { GlobalDecoderRegistry } from '../../../registry' diff --git a/src/proto/osmojs/cosmos/tx/signing/v1beta1/signing.ts b/src/proto/osmojs/cosmos/tx/signing/v1beta1/signing.ts index a295f95..a60cdbf 100644 --- 
a/src/proto/osmojs/cosmos/tx/signing/v1beta1/signing.ts +++ b/src/proto/osmojs/cosmos/tx/signing/v1beta1/signing.ts @@ -7,7 +7,7 @@ import { CompactBitArrayAmino, CompactBitArraySDKType } from '../../../crypto/multisig/v1beta1/multisig' -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino, AnySDKType } from '../../../../../google/protobuf/any' import { BinaryReader, BinaryWriter } from '../../../../../binary' import { GlobalDecoderRegistry } from '../../../../registry' import { isSet, bytesFromBase64, base64FromBytes } from '../../../../../helpers' diff --git a/src/proto/osmojs/cosmos/tx/v1beta1/tx.ts b/src/proto/osmojs/cosmos/tx/v1beta1/tx.ts index cc1db8e..0636881 100644 --- a/src/proto/osmojs/cosmos/tx/v1beta1/tx.ts +++ b/src/proto/osmojs/cosmos/tx/v1beta1/tx.ts @@ -2,7 +2,7 @@ /* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino, AnySDKType } from '../../../../google/protobuf/any' import { SignMode } from '../signing/v1beta1/signing' import { CompactBitArray, diff --git a/src/proto/osmojs/cosmos/upgrade/v1beta1/upgrade.ts b/src/proto/osmojs/cosmos/upgrade/v1beta1/upgrade.ts index 0a24f90..934c884 100644 --- a/src/proto/osmojs/cosmos/upgrade/v1beta1/upgrade.ts +++ b/src/proto/osmojs/cosmos/upgrade/v1beta1/upgrade.ts @@ -2,8 +2,8 @@ /* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Timestamp } from '../../../../google/protobuf/timestamp' +import { Any, AnyAmino, AnySDKType } from '../../../../google/protobuf/any' import { BinaryReader, BinaryWriter } from '../../../../binary' import { toTimestamp, fromTimestamp 
} from '../../../../helpers' import { GlobalDecoderRegistry } from '../../../registry' diff --git a/src/proto/osmojs/cosmwasm/bundle.ts b/src/proto/osmojs/cosmwasm/bundle.ts index 7fd769c..d58b80f 100644 --- a/src/proto/osmojs/cosmwasm/bundle.ts +++ b/src/proto/osmojs/cosmwasm/bundle.ts @@ -2,40 +2,20 @@ /* eslint-disable @typescript-eslint/no-namespace */ //@ts-nocheck import * as _118 from './wasm/v1/authz' -import * as _119 from './wasm/v1/genesis' -import * as _120 from './wasm/v1/ibc' import * as _121 from './wasm/v1/proposal_legacy' -import * as _122 from './wasm/v1/query' import * as _123 from './wasm/v1/tx' import * as _124 from './wasm/v1/types' import * as _325 from './wasm/v1/tx.amino' import * as _326 from './wasm/v1/tx.registry' -import * as _327 from './wasm/v1/query.lcd' -import * as _328 from './wasm/v1/query.rpc.Query' -import * as _329 from './wasm/v1/tx.rpc.msg' -import * as _421 from './lcd' -import * as _422 from './rpc.query' -import * as _423 from './rpc.tx' export namespace cosmwasm { export namespace wasm { export const v1 = { ..._118, - ..._119, - ..._120, ..._121, - ..._122, ..._123, ..._124, ..._325, - ..._326, - ..._327, - ..._328, - ..._329 + ..._326 } } - export const ClientFactory = { - ..._421, - ..._422, - ..._423 - } } diff --git a/src/proto/osmojs/cosmwasm/wasm/v1/authz.ts b/src/proto/osmojs/cosmwasm/wasm/v1/authz.ts index 7a02dfd..59949a3 100644 --- a/src/proto/osmojs/cosmwasm/wasm/v1/authz.ts +++ b/src/proto/osmojs/cosmwasm/wasm/v1/authz.ts @@ -8,7 +8,7 @@ import { AnyProtoMsg, AnyAmino, AnySDKType -} from 'cosmjs-types/google/protobuf/any' +} from '../../../../google/protobuf/any' import { Coin, CoinAmino, CoinSDKType } from '../../../cosmos/base/v1beta1/coin' import { BinaryReader, BinaryWriter } from '../../../../binary' import { GlobalDecoderRegistry } from '../../../registry' diff --git a/src/proto/osmojs/cosmwasm/wasm/v1/types.ts b/src/proto/osmojs/cosmwasm/wasm/v1/types.ts index e111167..0c63d2b 100644 --- 
a/src/proto/osmojs/cosmwasm/wasm/v1/types.ts +++ b/src/proto/osmojs/cosmwasm/wasm/v1/types.ts @@ -7,7 +7,7 @@ import { AnyProtoMsg, AnyAmino, AnySDKType -} from 'cosmjs-types/google/protobuf/any' +} from '../../../../google/protobuf/any' import { isSet, bytesFromBase64, base64FromBytes } from '../../../../helpers' import { BinaryReader, BinaryWriter } from '../../../../binary' import { GlobalDecoderRegistry } from '../../../registry' diff --git a/src/proto/osmojs/ibc/applications/interchain_accounts/v1/packet.ts b/src/proto/osmojs/ibc/applications/interchain_accounts/v1/packet.ts index c9fda82..308831f 100644 --- a/src/proto/osmojs/ibc/applications/interchain_accounts/v1/packet.ts +++ b/src/proto/osmojs/ibc/applications/interchain_accounts/v1/packet.ts @@ -2,7 +2,7 @@ /* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino, AnySDKType } from '../../../../../google/protobuf/any' import { isSet, bytesFromBase64, base64FromBytes } from '../../../../../helpers' import { BinaryReader, BinaryWriter } from '../../../../../binary' import { GlobalDecoderRegistry } from '../../../../registry' diff --git a/src/proto/osmojs/ibc/core/client/v1/client.ts b/src/proto/osmojs/ibc/core/client/v1/client.ts index ee635b6..1d9f46c 100644 --- a/src/proto/osmojs/ibc/core/client/v1/client.ts +++ b/src/proto/osmojs/ibc/core/client/v1/client.ts @@ -2,7 +2,7 @@ /* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino, AnySDKType } from '../../../../../google/protobuf/any' import { Plan, PlanAmino, diff --git a/src/proto/osmojs/ibc/core/client/v1/tx.ts b/src/proto/osmojs/ibc/core/client/v1/tx.ts index 8f392d9..fd77fc7 100644 --- 
a/src/proto/osmojs/ibc/core/client/v1/tx.ts +++ b/src/proto/osmojs/ibc/core/client/v1/tx.ts @@ -2,7 +2,7 @@ /* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino, AnySDKType } from '../../../../../google/protobuf/any' import { Plan, PlanAmino, diff --git a/src/proto/osmojs/ibc/core/connection/v1/tx.ts b/src/proto/osmojs/ibc/core/connection/v1/tx.ts index 910727e..0e426d9 100644 --- a/src/proto/osmojs/ibc/core/connection/v1/tx.ts +++ b/src/proto/osmojs/ibc/core/connection/v1/tx.ts @@ -10,7 +10,7 @@ import { VersionAmino, VersionSDKType } from './connection' -import { Any, AnyAmino, AnySDKType } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino, AnySDKType } from '../../../../../google/protobuf/any' import { Height, HeightAmino, diff --git a/src/proto/osmojs/index.ts b/src/proto/osmojs/index.ts index fbc4c7e..3f45110 100644 --- a/src/proto/osmojs/index.ts +++ b/src/proto/osmojs/index.ts @@ -8,20 +8,8 @@ export * from './cosmos/bundle' export * from './cosmos/client' -export * from './capability/bundle' -export * from './ibc/bundle' export * from './ibc/client' export * from './cosmwasm/bundle' export * from './cosmwasm/client' -export * from './osmosis/bundle' export * from './osmosis/client' -export * from './amino/bundle' -export * from './cosmos_proto/bundle' -export * from './gogoproto/bundle' -export * from './tendermint/bundle' -export * from './google/bundle' -export * from '../varint' -export * from '../utf8' -export * from '../binary' -export * from './types' export * from './registry' diff --git a/src/proto/osmojs/osmosis/gamm/v1beta1/balancerPool.ts b/src/proto/osmojs/osmosis/gamm/v1beta1/balancerPool.ts index dbe5997..a44609f 100644 --- a/src/proto/osmojs/osmosis/gamm/v1beta1/balancerPool.ts +++ b/src/proto/osmojs/osmosis/gamm/v1beta1/balancerPool.ts @@ -2,12 +2,12 @@ 
/* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../../google/protobuf/timestamp' import { Duration, DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' +} from '../../../../google/protobuf/duration' import { Coin, CoinAmino, CoinSDKType } from '../../../cosmos/base/v1beta1/coin' import { BinaryReader, BinaryWriter } from '../../../../binary' import { toTimestamp, fromTimestamp } from '../../../../helpers' diff --git a/src/proto/osmojs/osmosis/incentives/gauge.ts b/src/proto/osmojs/osmosis/incentives/gauge.ts index 880b755..123f5a1 100644 --- a/src/proto/osmojs/osmosis/incentives/gauge.ts +++ b/src/proto/osmojs/osmosis/incentives/gauge.ts @@ -8,12 +8,12 @@ import { QueryConditionSDKType } from '../lockup/lock' import { Coin, CoinAmino, CoinSDKType } from '../../cosmos/base/v1beta1/coin' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../google/protobuf/timestamp' import { Duration, DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' +} from '../../../google/protobuf/duration' import { BinaryReader, BinaryWriter } from '../../../binary' import { toTimestamp, fromTimestamp } from '../../../helpers' import { GlobalDecoderRegistry } from '../../registry' diff --git a/src/proto/osmojs/osmosis/incentives/tx.ts b/src/proto/osmojs/osmosis/incentives/tx.ts index c160365..aafc2f1 100644 --- a/src/proto/osmojs/osmosis/incentives/tx.ts +++ b/src/proto/osmojs/osmosis/incentives/tx.ts @@ -8,7 +8,7 @@ import { QueryConditionSDKType } from '../lockup/lock' import { Coin, CoinAmino, CoinSDKType } from '../../cosmos/base/v1beta1/coin' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../google/protobuf/timestamp' import { BinaryReader, BinaryWriter 
} from '../../../binary' import { toTimestamp, fromTimestamp } from '../../../helpers' import { GlobalDecoderRegistry } from '../../registry' diff --git a/src/proto/osmojs/osmosis/lockup/lock.ts b/src/proto/osmojs/osmosis/lockup/lock.ts index 9ae86d8..dd432da 100644 --- a/src/proto/osmojs/osmosis/lockup/lock.ts +++ b/src/proto/osmojs/osmosis/lockup/lock.ts @@ -6,8 +6,8 @@ import { Duration, DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +} from '../../../google/protobuf/duration' +import { Timestamp } from '../../../google/protobuf/timestamp' import { Coin, CoinAmino, CoinSDKType } from '../../cosmos/base/v1beta1/coin' import { BinaryReader, BinaryWriter } from '../../../binary' import { toTimestamp, fromTimestamp, isSet } from '../../../helpers' diff --git a/src/proto/osmojs/osmosis/lockup/tx.ts b/src/proto/osmojs/osmosis/lockup/tx.ts index da8c3e6..c993ed4 100644 --- a/src/proto/osmojs/osmosis/lockup/tx.ts +++ b/src/proto/osmojs/osmosis/lockup/tx.ts @@ -6,7 +6,7 @@ import { Duration, DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' +} from '../../../google/protobuf/duration' import { Coin, CoinAmino, CoinSDKType } from '../../cosmos/base/v1beta1/coin' import { PeriodLock, PeriodLockAmino, PeriodLockSDKType } from './lock' import { BinaryReader, BinaryWriter } from '../../../binary' diff --git a/src/proto/osmojs/osmosis/poolincentives/v1beta1/incentives.ts b/src/proto/osmojs/osmosis/poolincentives/v1beta1/incentives.ts index 394203b..7e128f8 100644 --- a/src/proto/osmojs/osmosis/poolincentives/v1beta1/incentives.ts +++ b/src/proto/osmojs/osmosis/poolincentives/v1beta1/incentives.ts @@ -6,7 +6,7 @@ import { Duration, DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' +} from '../../../../google/protobuf/duration' import { BinaryReader, BinaryWriter } from '../../../../binary' import { GlobalDecoderRegistry } 
from '../../../registry' export interface Params { diff --git a/src/proto/osmojs/osmosis/superfluid/tx.ts b/src/proto/osmojs/osmosis/superfluid/tx.ts index e8cd04f..8f62bdc 100644 --- a/src/proto/osmojs/osmosis/superfluid/tx.ts +++ b/src/proto/osmojs/osmosis/superfluid/tx.ts @@ -3,7 +3,7 @@ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck import { Coin, CoinAmino, CoinSDKType } from '../../cosmos/base/v1beta1/coin' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../google/protobuf/timestamp' import { BinaryReader, BinaryWriter } from '../../../binary' import { GlobalDecoderRegistry } from '../../registry' import { Decimal } from '@cosmjs/math' diff --git a/src/proto/osmojs/registry.ts b/src/proto/osmojs/registry.ts index 1e0f730..8c740d4 100644 --- a/src/proto/osmojs/registry.ts +++ b/src/proto/osmojs/registry.ts @@ -7,7 +7,7 @@ */ import { BinaryReader } from '../binary' -import { Any, AnyAmino } from 'cosmjs-types/google/protobuf/any' +import { Any, AnyAmino } from '../google/protobuf/any' import { IProtoType, TelescopeGeneratedCodec } from './types' export class GlobalDecoderRegistry { diff --git a/src/proto/osmojs/tendermint/abci/types.ts b/src/proto/osmojs/tendermint/abci/types.ts index 078093a..ba29643 100644 --- a/src/proto/osmojs/tendermint/abci/types.ts +++ b/src/proto/osmojs/tendermint/abci/types.ts @@ -2,7 +2,7 @@ /* eslint-disable @typescript-eslint/no-empty-interface */ /* eslint-disable @typescript-eslint/ban-ts-comment */ //@ts-nocheck -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../google/protobuf/timestamp' import { ConsensusParams, ConsensusParamsAmino, diff --git a/src/proto/osmojs/tendermint/types/params.ts b/src/proto/osmojs/tendermint/types/params.ts index f2ae1d9..9ecda89 100644 --- a/src/proto/osmojs/tendermint/types/params.ts +++ b/src/proto/osmojs/tendermint/types/params.ts @@ -6,7 +6,7 @@ import { Duration, 
DurationAmino, DurationSDKType -} from 'cosmjs-types/google/protobuf/duration' +} from '../../../google/protobuf/duration' import { BinaryReader, BinaryWriter } from '../../../binary' import { GlobalDecoderRegistry } from '../../registry' /** diff --git a/src/proto/osmojs/tendermint/types/types.ts b/src/proto/osmojs/tendermint/types/types.ts index b120f2b..a3e282d 100644 --- a/src/proto/osmojs/tendermint/types/types.ts +++ b/src/proto/osmojs/tendermint/types/types.ts @@ -4,7 +4,7 @@ //@ts-nocheck import { Proof, ProofAmino, ProofSDKType } from '../crypto/proof' import { Consensus, ConsensusAmino, ConsensusSDKType } from '../version/types' -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../google/protobuf/timestamp' import { ValidatorSet, ValidatorSetAmino, diff --git a/src/proto/stridejs/codegen/stride/airdrop/tx.ts b/src/proto/stridejs/codegen/stride/airdrop/tx.ts index 4349b3b..14e4423 100644 --- a/src/proto/stridejs/codegen/stride/airdrop/tx.ts +++ b/src/proto/stridejs/codegen/stride/airdrop/tx.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/ban-ts-comment */ /* eslint-disable @typescript-eslint/no-empty-interface */ //@ts-nocheck -import { Timestamp } from 'cosmjs-types/google/protobuf/timestamp' +import { Timestamp } from '../../../../google/protobuf/timestamp' import { BinaryReader, BinaryWriter } from '../../../../binary' import { toTimestamp, fromTimestamp } from '../../../../helpers' import { Decimal } from '@cosmjs/math' diff --git a/src/proto/stridejs/codegen/types.ts b/src/proto/stridejs/codegen/types.ts index ac0eab8..abefb4f 100644 --- a/src/proto/stridejs/codegen/types.ts +++ b/src/proto/stridejs/codegen/types.ts @@ -5,7 +5,7 @@ */ import { IBinaryReader, IBinaryWriter } from '../../binary' -import { Any } from 'cosmjs-types/google/protobuf/any' +import { Any } from '../../google/protobuf/any' import { OfflineSigner } from '@cosmjs/proto-signing' import { HttpEndpoint } from 
'@cosmjs/tendermint-rpc' diff --git a/src/proto/varint.ts b/src/proto/stridejs/codegen/varint.ts similarity index 100% rename from src/proto/varint.ts rename to src/proto/stridejs/codegen/varint.ts diff --git a/yarn.lock b/yarn.lock index 9e8b4e9..ca4c848 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7,6 +7,14 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@confio/ics23@^0.6.8": + version "0.6.8" + resolved "https://registry.yarnpkg.com/@confio/ics23/-/ics23-0.6.8.tgz#2a6b4f1f2b7b20a35d9a0745bb5a446e72930b3d" + integrity sha512-wB6uo+3A50m0sW/EWcU64xpV/8wShZ6bMTa7pF8eYsTrSkQA7oLUIJcs/wb8g4y2Oyq701BaGiO6n/ak5WXO1w== + dependencies: + "@noble/hashes" "^1.0.0" + protobufjs "^6.8.8" + "@cosmjs/amino@^0.31.3": version "0.31.3" resolved "https://registry.npmjs.org/@cosmjs/amino/-/amino-0.31.3.tgz#0f4aa6bd68331c71bd51b187fa64f00eb075db0a" @@ -17,6 +25,16 @@ "@cosmjs/math" "^0.31.3" "@cosmjs/utils" "^0.31.3" +"@cosmjs/amino@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.32.4.tgz#3908946c0394e6d431694c8992c5147079a1c860" + integrity sha512-zKYOt6hPy8obIFtLie/xtygCkH9ZROiQ12UHfKsOkWaZfPQUvVbtgmu6R4Kn1tFLI/SRkw7eqhaogmW/3NYu/Q== + dependencies: + "@cosmjs/crypto" "^0.32.4" + "@cosmjs/encoding" "^0.32.4" + "@cosmjs/math" "^0.32.4" + "@cosmjs/utils" "^0.32.4" + "@cosmjs/crypto@^0.31.3": version "0.31.3" resolved "https://registry.npmjs.org/@cosmjs/crypto/-/crypto-0.31.3.tgz#c752cb6d682fdc735dcb45a2519f89c56ba16c26" @@ -30,6 +48,19 @@ elliptic "^6.5.4" libsodium-wrappers-sumo "^0.7.11" +"@cosmjs/crypto@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.32.4.tgz#5d29633b661eaf092ddb3e7ea6299cfd6f4507a2" + integrity sha512-zicjGU051LF1V9v7bp8p7ovq+VyC91xlaHdsFOTo2oVry3KQikp8L/81RkXmUIT8FxMwdx1T7DmFwVQikcSDIw== 
+ dependencies: + "@cosmjs/encoding" "^0.32.4" + "@cosmjs/math" "^0.32.4" + "@cosmjs/utils" "^0.32.4" + "@noble/hashes" "^1" + bn.js "^5.2.0" + elliptic "^6.5.4" + libsodium-wrappers-sumo "^0.7.11" + "@cosmjs/encoding@^0.31.3": version "0.31.3" resolved "https://registry.npmjs.org/@cosmjs/encoding/-/encoding-0.31.3.tgz#2519d9c9ae48368424971f253775c4580b54c5aa" @@ -39,6 +70,23 @@ bech32 "^1.1.4" readonly-date "^1.0.0" +"@cosmjs/encoding@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/encoding/-/encoding-0.32.4.tgz#646e0e809f7f4f1414d8fa991fb0ffe6c633aede" + integrity sha512-tjvaEy6ZGxJchiizzTn7HVRiyTg1i4CObRRaTRPknm5EalE13SV+TCHq38gIDfyUeden4fCuaBVEdBR5+ti7Hw== + dependencies: + base64-js "^1.3.0" + bech32 "^1.1.4" + readonly-date "^1.0.0" + +"@cosmjs/json-rpc@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/json-rpc/-/json-rpc-0.32.4.tgz#be91eb89ea78bd5dc02d0a9fa184dd6790790f0b" + integrity sha512-/jt4mBl7nYzfJ2J/VJ+r19c92mUKF0Lt0JxM3MXEJl7wlwW5haHAWtzRujHkyYMXOwIR+gBqT2S0vntXVBRyhQ== + dependencies: + "@cosmjs/stream" "^0.32.4" + xstream "^11.14.0" + "@cosmjs/math@^0.31.3": version "0.31.3" resolved "https://registry.npmjs.org/@cosmjs/math/-/math-0.31.3.tgz#767f7263d12ba1b9ed2f01f68d857597839fd957" @@ -46,6 +94,13 @@ dependencies: bn.js "^5.2.0" +"@cosmjs/math@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/math/-/math-0.32.4.tgz#87ac9eadc06696e30a30bdb562a495974bfd0a1a" + integrity sha512-++dqq2TJkoB8zsPVYCvrt88oJWsy1vMOuSOKcdlnXuOA/ASheTJuYy4+oZlTQ3Fr8eALDLGGPhJI02W2HyAQaw== + dependencies: + bn.js "^5.2.0" + "@cosmjs/proto-signing@0.31.3": version "0.31.3" resolved "https://registry.npmjs.org/@cosmjs/proto-signing/-/proto-signing-0.31.3.tgz#20440b7b96fb2cd924256a10e656fd8d4481cdcd" @@ -59,11 +114,77 @@ cosmjs-types "^0.8.0" long "^4.0.0" +"@cosmjs/proto-signing@^0.32.4": + version "0.32.4" + resolved 
"https://registry.yarnpkg.com/@cosmjs/proto-signing/-/proto-signing-0.32.4.tgz#5a06e087c6d677439c8c9b25b5223d5e72c4cd93" + integrity sha512-QdyQDbezvdRI4xxSlyM1rSVBO2st5sqtbEIl3IX03uJ7YiZIQHyv6vaHVf1V4mapusCqguiHJzm4N4gsFdLBbQ== + dependencies: + "@cosmjs/amino" "^0.32.4" + "@cosmjs/crypto" "^0.32.4" + "@cosmjs/encoding" "^0.32.4" + "@cosmjs/math" "^0.32.4" + "@cosmjs/utils" "^0.32.4" + cosmjs-types "^0.9.0" + +"@cosmjs/socket@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/socket/-/socket-0.32.4.tgz#86ab6adf3a442314774c0810b7a7cfcddf4f2082" + integrity sha512-davcyYziBhkzfXQTu1l5NrpDYv0K9GekZCC9apBRvL1dvMc9F/ygM7iemHjUA+z8tJkxKxrt/YPjJ6XNHzLrkw== + dependencies: + "@cosmjs/stream" "^0.32.4" + isomorphic-ws "^4.0.1" + ws "^7" + xstream "^11.14.0" + +"@cosmjs/stargate@0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/stargate/-/stargate-0.32.4.tgz#bd0e4d3bf613b629addbf5f875d3d3b50f640af1" + integrity sha512-usj08LxBSsPRq9sbpCeVdyLx2guEcOHfJS9mHGCLCXpdAPEIEQEtWLDpEUc0LEhWOx6+k/ChXTc5NpFkdrtGUQ== + dependencies: + "@confio/ics23" "^0.6.8" + "@cosmjs/amino" "^0.32.4" + "@cosmjs/encoding" "^0.32.4" + "@cosmjs/math" "^0.32.4" + "@cosmjs/proto-signing" "^0.32.4" + "@cosmjs/stream" "^0.32.4" + "@cosmjs/tendermint-rpc" "^0.32.4" + "@cosmjs/utils" "^0.32.4" + cosmjs-types "^0.9.0" + xstream "^11.14.0" + +"@cosmjs/stream@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/stream/-/stream-0.32.4.tgz#83e1f2285807467c56d9ea0e1113f79d9fa63802" + integrity sha512-Gih++NYHEiP+oyD4jNEUxU9antoC0pFSg+33Hpp0JlHwH0wXhtD3OOKnzSfDB7OIoEbrzLJUpEjOgpCp5Z+W3A== + dependencies: + xstream "^11.14.0" + +"@cosmjs/tendermint-rpc@0.32.4", "@cosmjs/tendermint-rpc@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/tendermint-rpc/-/tendermint-rpc-0.32.4.tgz#b36f9ec657498e42c97e21bb7368798ef6279752" + integrity 
sha512-MWvUUno+4bCb/LmlMIErLypXxy7ckUuzEmpufYYYd9wgbdCXaTaO08SZzyFM5PI8UJ/0S2AmUrgWhldlbxO8mw== + dependencies: + "@cosmjs/crypto" "^0.32.4" + "@cosmjs/encoding" "^0.32.4" + "@cosmjs/json-rpc" "^0.32.4" + "@cosmjs/math" "^0.32.4" + "@cosmjs/socket" "^0.32.4" + "@cosmjs/stream" "^0.32.4" + "@cosmjs/utils" "^0.32.4" + axios "^1.6.0" + readonly-date "^1.0.0" + xstream "^11.14.0" + "@cosmjs/utils@^0.31.3": version "0.31.3" resolved "https://registry.npmjs.org/@cosmjs/utils/-/utils-0.31.3.tgz#f97bbfda35ad69e80cd5c7fe0a270cbda16db1ed" integrity sha512-VBhAgzrrYdIe0O5IbKRqwszbQa7ZyQLx9nEQuHQ3HUplQW7P44COG/ye2n6AzCudtqxmwdX7nyX8ta1J07GoqA== +"@cosmjs/utils@^0.32.4": + version "0.32.4" + resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.32.4.tgz#a9a717c9fd7b1984d9cefdd0ef6c6f254060c671" + integrity sha512-D1Yc+Zy8oL/hkUkFUL/bwxvuDBzRGpc4cF7/SkdhxX4iHpSLgdOuTt1mhCh9+kl6NQREy9t7SYZ6xeW5gFe60w== + "@esbuild/android-arm64@0.16.17": version "0.16.17" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.16.17.tgz#cf91e86df127aa3d141744edafcba0abdc577d23" @@ -253,6 +374,11 @@ resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.3.0.tgz#085fd70f6d7d9d109671090ccae1d3bec62554a1" integrity sha512-ilHEACi9DwqJB0pw7kv+Apvh50jiiSyR/cQ3y4W7lOR5mhvn/50FLUfsnfJz0BDZtl/RR16kXvptiv6q1msYZg== +"@noble/hashes@^1.0.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.5.0.tgz#abadc5ca20332db2b1b2aa3e496e9af1213570b0" + integrity sha512-1j6kQFb7QRru7eKN3ZDvRcP13rugwdxZqCjbiAVZfIJwgj2A65UmT4TgARXGlXgnRkORLTDTrO19ZErt7+QXgA== + "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" @@ -572,11 +698,25 @@ assertion-error@^1.1.0: resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" integrity 
sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + available-typed-arrays@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== +axios@^1.6.0: + version "1.7.7" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f" + integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q== + dependencies: + follow-redirects "^1.15.6" + form-data "^4.0.0" + proxy-from-env "^1.1.0" + balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" @@ -739,6 +879,13 @@ color-name@~1.1.4: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" @@ -749,7 +896,7 @@ convert-source-map@^1.6.0: resolved 
"https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== -cosmjs-types@0.9.0: +cosmjs-types@0.9.0, cosmjs-types@^0.9.0: version "0.9.0" resolved "https://registry.npmjs.org/cosmjs-types/-/cosmjs-types-0.9.0.tgz#c3bc482d28c7dfa25d1445093fdb2d9da1f6cfcc" integrity sha512-MN/yUe6mkJwHnCFfsNPeCfXVhyxHYW6c/xDUzrSbBycYzw++XvWDMJArXp2pLdgD6FQ8DW79vkPjeNKVrXaHeQ== @@ -801,6 +948,15 @@ deep-is@^0.1.3: resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== +define-data-property@^1.0.1: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + define-properties@^1.1.3, define-properties@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" @@ -809,6 +965,20 @@ define-properties@^1.1.3, define-properties@^1.1.4: has-property-descriptors "^1.0.0" object-keys "^1.1.1" +define-properties@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" + integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== + dependencies: + define-data-property "^1.0.1" + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + diff@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" @@ -902,6 +1072,18 @@ es-abstract@^1.19.0, es-abstract@^1.20.4: unbox-primitive "^1.0.2" which-typed-array "^1.1.9" +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + es-set-tostringtag@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" @@ -1148,6 +1330,11 @@ flatted@^3.1.0: resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== +follow-redirects@^1.15.6: + version "1.15.9" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" + integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== + for-each@^0.3.3: version "0.3.3" resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" @@ -1163,6 +1350,15 @@ foreground-child@^2.0.0: cross-spawn "^7.0.0" 
signal-exit "^3.0.2" +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -1178,6 +1374,11 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function.prototype.name@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" @@ -1212,6 +1413,17 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@ has "^1.0.3" has-symbols "^1.0.3" +get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + get-symbol-description@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" @@ -1253,6 +1465,14 
@@ globals@^13.19.0: dependencies: type-fest "^0.20.2" +globalthis@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.4.tgz#7430ed3a975d97bfb59bcce41f5cabbafa651236" + integrity sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ== + dependencies: + define-properties "^1.2.1" + gopd "^1.0.1" + globalthis@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" @@ -1343,6 +1563,13 @@ hash.js@^1.0.0, hash.js@^1.0.3: inherits "^2.0.3" minimalistic-assert "^1.0.1" +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" @@ -1546,6 +1773,11 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== +isomorphic-ws@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz#55fd4cd6c5e6491e76dc125938dd863f5cd4f2dc" + integrity sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w== + istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" @@ -1691,6 +1923,18 @@ micromatch@^4.0.4: braces "^3.0.2" picomatch "^2.3.1" +mime-db@1.52.0: + version "1.52.0" + resolved 
"https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" @@ -1960,6 +2204,25 @@ protobufjs@7.2.5: "@types/node" ">=13.7.0" long "^5.0.0" +protobufjs@^6.8.8: + version "6.11.4" + resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.4.tgz#29a412c38bf70d89e537b6d02d904a6f448173aa" + integrity sha512-5kQWPaJHi1WoCpjTGszzQ32PG2F4+wRY6BmAT4Vfw56Q2FZ4YZzK20xUYQH4YkfehY1e6QSICrJquM6xXZNcrw== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/long" "^4.0.1" + "@types/node" ">=13.7.0" + long "^4.0.0" + protobufjs@~6.11.2: version "6.11.3" resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.3.tgz#637a527205a35caa4f3e2a9a4a13ddffe0e7af74" @@ -1979,6 +2242,11 @@ protobufjs@~6.11.2: "@types/node" ">=13.7.0" long "^4.0.0" +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + 
punycode@^2.1.0: version "2.3.0" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" @@ -2295,6 +2563,11 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== +symbol-observable@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-2.0.3.tgz#5b521d3d07a43c351055fa43b8355b62d33fd16a" + integrity sha512-sQV7phh2WCYAn81oAkakC5qjq2Ml0g8ozqz03wOGnx9dDlG1de6yrF+0RAzSJD8fPUow3PTSMf2SAbOGxb93BA== + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -2531,6 +2804,19 @@ wrappy@1: resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== +ws@^7: + version "7.5.10" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" + integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== + +xstream@^11.14.0: + version "11.14.0" + resolved "https://registry.yarnpkg.com/xstream/-/xstream-11.14.0.tgz#2c071d26b18310523b6877e86b4e54df068a9ae5" + integrity sha512-1bLb+kKKtKPbgTK6i/BaoAn03g47PpFstlbe1BA+y3pNS/LfvcaghS5BFf9+EE1J+KwSQsEpfJvFN5GqFtiNmw== + dependencies: + globalthis "^1.0.1" + symbol-observable "^2.0.3" + y18n@^5.0.5: version "5.0.8" resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55"