This commit is contained in:
CHEVALLIER Abel
2025-11-13 16:23:22 +01:00
parent de9c515a47
commit cb235644dc
34924 changed files with 3811102 additions and 0 deletions

View File

@@ -0,0 +1,44 @@
/**
 * An authenticated message of arbitrary type.
 *
 * Generated from `envelope.proto` (a DSSE envelope — see the "DSSEv1" PAE
 * below); the JSON codec declared for it lives in the matching `.js` file.
 */
export interface Envelope {
    /**
     * Message to be signed. (In JSON, this is encoded as base64.)
     * REQUIRED.
     */
    payload: Buffer;
    /**
     * String unambiguously identifying how to interpret payload.
     * REQUIRED.
     */
    payloadType: string;
    /**
     * Signature over:
     *     PAE(type, payload)
     * Where PAE is defined as:
     * PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload
     * +               = concatenation
     * SP              = ASCII space [0x20]
     * "DSSEv1"        = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31]
     * LEN(s)          = ASCII decimal encoding of the byte length of s, with no leading zeros
     * REQUIRED (length >= 1).
     */
    signatures: Signature[];
}
/** One signature over an Envelope's PAE, plus an optional key hint. */
export interface Signature {
    /**
     * Signature itself. (In JSON, this is encoded as base64.)
     * REQUIRED.
     */
    sig: Buffer;
    /**
     * *Unauthenticated* hint identifying which public key was used.
     * OPTIONAL.
     */
    keyid: string;
}
/** JSON codec for Envelope, implemented in the generated runtime (.js) file. */
export declare const Envelope: MessageFns<Envelope>;
/** JSON codec for Signature, implemented in the generated runtime (.js) file. */
export declare const Signature: MessageFns<Signature>;
/** Common shape of the per-message JSON codecs emitted by ts-proto. */
interface MessageFns<T> {
    fromJSON(object: any): T;
    toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,59 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: envelope.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Signature = exports.Envelope = void 0;
/** JSON codec for the DSSE Envelope message. */
exports.Envelope = {
    fromJSON(object) {
        // Missing fields fall back to their proto3 defaults.
        const payload = isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0);
        const payloadType = isSet(object.payloadType) ? globalThis.String(object.payloadType) : "";
        const rawSignatures = globalThis.Array.isArray(object?.signatures) ? object.signatures : [];
        return {
            payload,
            payloadType,
            signatures: rawSignatures.map((e) => exports.Signature.fromJSON(e)),
        };
    },
    toJSON(message) {
        // Fields holding their default value are omitted from the JSON object.
        const { payload, payloadType, signatures } = message;
        const obj = {};
        if (payload.length !== 0) {
            obj.payload = base64FromBytes(payload);
        }
        if (payloadType !== "") {
            obj.payloadType = payloadType;
        }
        if (signatures?.length) {
            obj.signatures = signatures.map((e) => exports.Signature.toJSON(e));
        }
        return obj;
    },
};
/** JSON codec for the DSSE Signature message. */
exports.Signature = {
    fromJSON(object) {
        const sig = isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0);
        const keyid = isSet(object.keyid) ? globalThis.String(object.keyid) : "";
        return { sig, keyid };
    },
    toJSON(message) {
        const { sig, keyid } = message;
        const obj = {};
        if (sig.length !== 0) {
            obj.sig = base64FromBytes(sig);
        }
        if (keyid !== "") {
            obj.keyid = keyid;
        }
        return obj;
    },
};
/** Decode a base64 string into a freshly allocated Uint8Array. */
function bytesFromBase64(b64) {
    const decoded = globalThis.Buffer.from(b64, "base64");
    return new Uint8Array(decoded);
}
/** Encode a byte array as a base64 string. */
function base64FromBytes(arr) {
    const buf = globalThis.Buffer.from(arr);
    return buf.toString("base64");
}
/** True when `value` is neither null nor undefined (proto3 JSON field presence). */
function isSet(value) {
    return value != null;
}

View File

@@ -0,0 +1,64 @@
import { Any } from "./google/protobuf/any";
/**
 * A CloudEvent message (generated from `events.proto`). The `data` oneof is
 * represented by ts-proto as a `$case`-tagged union.
 */
export interface CloudEvent {
    /** Required Attributes */
    id: string;
    /** URI-reference */
    source: string;
    specVersion: string;
    type: string;
    /** Optional & Extension Attributes */
    attributes: {
        [key: string]: CloudEvent_CloudEventAttributeValue;
    };
    /** -- CloudEvent Data (Bytes, Text, or Proto) */
    data?: {
        $case: "binaryData";
        binaryData: Buffer;
    } | {
        $case: "textData";
        textData: string;
    } | {
        $case: "protoData";
        protoData: Any;
    } | undefined;
}
/**
 * Key/value pair for the CloudEvent `attributes` map. Looks like the synthetic
 * map-entry message protoc emits for map fields — TODO confirm against events.proto.
 */
export interface CloudEvent_AttributesEntry {
    key: string;
    value: CloudEvent_CloudEventAttributeValue | undefined;
}
/**
 * One CloudEvent context-attribute value. At most one variant of the `attr`
 * oneof is set (ts-proto `$case`-tagged union); the whole field may be undefined.
 */
export interface CloudEvent_CloudEventAttributeValue {
    attr?: {
        $case: "ceBoolean";
        ceBoolean: boolean;
    } | {
        $case: "ceInteger";
        ceInteger: number;
    } | {
        $case: "ceString";
        ceString: string;
    } | {
        $case: "ceBytes";
        ceBytes: Buffer;
    } | {
        $case: "ceUri";
        ceUri: string;
    } | {
        $case: "ceUriRef";
        ceUriRef: string;
    } | {
        $case: "ceTimestamp";
        ceTimestamp: Date;
    } | undefined;
}
/** A batch of CloudEvents. */
export interface CloudEventBatch {
    events: CloudEvent[];
}
/** JSON codecs for the message types above, implemented in the generated runtime (.js) file. */
export declare const CloudEvent: MessageFns<CloudEvent>;
export declare const CloudEvent_AttributesEntry: MessageFns<CloudEvent_AttributesEntry>;
export declare const CloudEvent_CloudEventAttributeValue: MessageFns<CloudEvent_CloudEventAttributeValue>;
export declare const CloudEventBatch: MessageFns<CloudEventBatch>;
/** Common shape of the per-message JSON codecs emitted by ts-proto. */
interface MessageFns<T> {
    fromJSON(object: any): T;
    toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,174 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: events.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
/* eslint-disable */
const any_1 = require("./google/protobuf/any");
const timestamp_1 = require("./google/protobuf/timestamp");
/** JSON codec for the CloudEvent message. */
exports.CloudEvent = {
    fromJSON(object) {
        // Convert each raw attribute into its typed wrapper.
        const attributes = {};
        if (isObject(object.attributes)) {
            for (const [key, value] of Object.entries(object.attributes)) {
                attributes[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
            }
        }
        // The data oneof: first matching JSON key wins; otherwise undefined.
        let data;
        if (isSet(object.binaryData)) {
            data = { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) };
        }
        else if (isSet(object.textData)) {
            data = { $case: "textData", textData: globalThis.String(object.textData) };
        }
        else if (isSet(object.protoData)) {
            data = { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) };
        }
        return {
            id: isSet(object.id) ? globalThis.String(object.id) : "",
            source: isSet(object.source) ? globalThis.String(object.source) : "",
            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
            type: isSet(object.type) ? globalThis.String(object.type) : "",
            attributes,
            data,
        };
    },
    toJSON(message) {
        // Fields holding their default value are omitted from the JSON object.
        const obj = {};
        if (message.id !== "") {
            obj.id = message.id;
        }
        if (message.source !== "") {
            obj.source = message.source;
        }
        if (message.specVersion !== "") {
            obj.specVersion = message.specVersion;
        }
        if (message.type !== "") {
            obj.type = message.type;
        }
        if (message.attributes) {
            const entries = Object.entries(message.attributes);
            if (entries.length > 0) {
                obj.attributes = {};
                for (const [k, v] of entries) {
                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
                }
            }
        }
        // Flatten the data oneof back onto its JSON key.
        switch (message.data?.$case) {
            case "binaryData":
                obj.binaryData = base64FromBytes(message.data.binaryData);
                break;
            case "textData":
                obj.textData = message.data.textData;
                break;
            case "protoData":
                obj.protoData = any_1.Any.toJSON(message.data.protoData);
                break;
        }
        return obj;
    },
};
/** JSON codec for one attributes map entry. */
exports.CloudEvent_AttributesEntry = {
    fromJSON(object) {
        const key = isSet(object.key) ? globalThis.String(object.key) : "";
        const value = isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined;
        return { key, value };
    },
    toJSON(message) {
        const { key, value } = message;
        const obj = {};
        if (key !== "") {
            obj.key = key;
        }
        if (value !== undefined) {
            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(value);
        }
        return obj;
    },
};
/** JSON codec for a CloudEvent attribute value ($case-tagged oneof). */
exports.CloudEvent_CloudEventAttributeValue = {
    fromJSON(object) {
        // First matching JSON key determines the oneof variant.
        let attr;
        if (isSet(object.ceBoolean)) {
            attr = { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) };
        }
        else if (isSet(object.ceInteger)) {
            attr = { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) };
        }
        else if (isSet(object.ceString)) {
            attr = { $case: "ceString", ceString: globalThis.String(object.ceString) };
        }
        else if (isSet(object.ceBytes)) {
            attr = { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) };
        }
        else if (isSet(object.ceUri)) {
            attr = { $case: "ceUri", ceUri: globalThis.String(object.ceUri) };
        }
        else if (isSet(object.ceUriRef)) {
            attr = { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) };
        }
        else if (isSet(object.ceTimestamp)) {
            attr = { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) };
        }
        return { attr };
    },
    toJSON(message) {
        const obj = {};
        switch (message.attr?.$case) {
            case "ceBoolean":
                obj.ceBoolean = message.attr.ceBoolean;
                break;
            case "ceInteger":
                // Proto integers are serialized as whole numbers.
                obj.ceInteger = Math.round(message.attr.ceInteger);
                break;
            case "ceString":
                obj.ceString = message.attr.ceString;
                break;
            case "ceBytes":
                obj.ceBytes = base64FromBytes(message.attr.ceBytes);
                break;
            case "ceUri":
                obj.ceUri = message.attr.ceUri;
                break;
            case "ceUriRef":
                obj.ceUriRef = message.attr.ceUriRef;
                break;
            case "ceTimestamp":
                obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
                break;
        }
        return obj;
    },
};
/** JSON codec for a batch of CloudEvents. */
exports.CloudEventBatch = {
    fromJSON(object) {
        const rawEvents = globalThis.Array.isArray(object?.events) ? object.events : [];
        return { events: rawEvents.map((e) => exports.CloudEvent.fromJSON(e)) };
    },
    toJSON(message) {
        const obj = {};
        if (message.events?.length) {
            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
        }
        return obj;
    },
};
/** Decode a base64 string into a freshly allocated Uint8Array. */
function bytesFromBase64(b64) {
    const decoded = globalThis.Buffer.from(b64, "base64");
    return new Uint8Array(decoded);
}
/** Encode a byte array as a base64 string. */
function base64FromBytes(arr) {
    const buf = globalThis.Buffer.from(arr);
    return buf.toString("base64");
}
/**
 * Convert a Timestamp ({seconds: string, nanos: number}) into a JS Date.
 * Sub-millisecond precision is lost (Date resolves to milliseconds).
 */
function fromTimestamp(t) {
    const seconds = globalThis.Number(t.seconds) || 0;
    const nanos = t.nanos || 0;
    return new globalThis.Date(seconds * 1_000 + nanos / 1_000_000);
}
/**
 * Accepts a Date (returned as-is), an RFC 3339 string, or a Timestamp-shaped
 * object, and yields a JS Date.
 */
function fromJsonTimestamp(o) {
    if (o instanceof globalThis.Date) {
        return o;
    }
    if (typeof o === "string") {
        return new globalThis.Date(o);
    }
    return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
}
/** True for any non-null object (arrays included). */
function isObject(value) {
    return value !== null && typeof value === "object";
}
/** True when `value` is neither null nor undefined (proto3 JSON field presence). */
function isSet(value) {
    return value != null;
}

View File

@@ -0,0 +1,74 @@
/**
 * An indicator of the behavior of a given field (for example, that a field
 * is required in requests, or given as output but ignored as input).
 * This **does not** change the behavior in protocol buffers itself; it only
 * denotes the behavior and may affect how API tooling handles the field.
 *
 * Note: This enum **may** receive new values in the future.
 *
 * Generated from `google/api/field_behavior.proto`; convert to/from JSON with
 * fieldBehaviorFromJSON / fieldBehaviorToJSON below.
 */
export declare enum FieldBehavior {
    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
    FIELD_BEHAVIOR_UNSPECIFIED = 0,
    /**
     * OPTIONAL - Specifically denotes a field as optional.
     * While all fields in protocol buffers are optional, this may be specified
     * for emphasis if appropriate.
     */
    OPTIONAL = 1,
    /**
     * REQUIRED - Denotes a field as required.
     * This indicates that the field **must** be provided as part of the request,
     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
     */
    REQUIRED = 2,
    /**
     * OUTPUT_ONLY - Denotes a field as output only.
     * This indicates that the field is provided in responses, but including the
     * field in a request does nothing (the server *must* ignore it and
     * *must not* throw an error as a result of the field's presence).
     */
    OUTPUT_ONLY = 3,
    /**
     * INPUT_ONLY - Denotes a field as input only.
     * This indicates that the field is provided in requests, and the
     * corresponding field is not included in output.
     */
    INPUT_ONLY = 4,
    /**
     * IMMUTABLE - Denotes a field as immutable.
     * This indicates that the field may be set once in a request to create a
     * resource, but may not be changed thereafter.
     */
    IMMUTABLE = 5,
    /**
     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
     * This indicates that the service may provide the elements of the list
     * in any arbitrary order, rather than the order the user originally
     * provided. Additionally, the list's order may or may not be stable.
     */
    UNORDERED_LIST = 6,
    /**
     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
     * This indicates that if the user provides the empty value in a request,
     * a non-empty value will be returned. The user will not be aware of what
     * non-empty value to expect.
     */
    NON_EMPTY_DEFAULT = 7,
    /**
     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
     * google.api.resource) is used in the resource name to uniquely identify the
     * resource. For AIP-compliant APIs, this should only be applied to the
     * `name` field on the resource.
     *
     * This behavior should not be applied to references to other resources within
     * the message.
     *
     * The identifier field of resources often have different field behavior
     * depending on the request it is embedded in (e.g. for Create methods name
     * is optional and unused, while for Update methods it is required). Instead
     * of method-specific annotations, only `IDENTIFIER` is required.
     */
    IDENTIFIER = 8
}
/** Parses a JSON enum value (number 0-8 or canonical name string) into a FieldBehavior; throws on anything else. */
export declare function fieldBehaviorFromJSON(object: any): FieldBehavior;
/** Converts a FieldBehavior to its canonical JSON name string; throws on unrecognized values. */
export declare function fieldBehaviorToJSON(object: FieldBehavior): string;

View File

@@ -0,0 +1,141 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: google/api/field_behavior.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.FieldBehavior = void 0;
exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
/* eslint-disable */
/**
 * An indicator of the behavior of a given field (for example, that a field
 * is required in requests, or given as output but ignored as input).
 * This **does not** change the behavior in protocol buffers itself; it only
 * denotes the behavior and may affect how API tooling handles the field.
 *
 * Note: This enum **may** receive new values in the future.
 */
var FieldBehavior;
(function (FieldBehavior) {
    // Register each member under both its name and its numeric value,
    // reproducing the bidirectional mapping TypeScript emits for numeric enums.
    const register = (name, value) => {
        FieldBehavior[name] = value;
        FieldBehavior[value] = name;
    };
    /** Conventional default for enums. Do not use this. */
    register("FIELD_BEHAVIOR_UNSPECIFIED", 0);
    /** Specifically denotes a field as optional (for emphasis, since all proto fields are optional). */
    register("OPTIONAL", 1);
    /** Field must be provided in requests; omission causes an error (usually `INVALID_ARGUMENT`). */
    register("REQUIRED", 2);
    /** Field appears in responses only; the server ignores it in requests without erroring. */
    register("OUTPUT_ONLY", 3);
    /** Field is accepted in requests but never included in output. */
    register("INPUT_ONLY", 4);
    /** Field may be set once at resource creation but not changed thereafter. */
    register("IMMUTABLE", 5);
    /** Repeated field whose element order is arbitrary and possibly unstable. */
    register("UNORDERED_LIST", 6);
    /** Field returns a non-empty default value when the empty value is provided. */
    register("NON_EMPTY_DEFAULT", 7);
    /** Field is the resource's unique identifier within its resource name (AIP `name` field). */
    register("IDENTIFIER", 8);
})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
/**
 * Parse a JSON enum value into FieldBehavior. Accepts either the numeric
 * value (exact integers 0-8) or the canonical name string; anything else
 * throws, mirroring ts-proto's generated behavior.
 */
function fieldBehaviorFromJSON(object) {
    // Index in this array == the enum's numeric value.
    const names = [
        "FIELD_BEHAVIOR_UNSPECIFIED",
        "OPTIONAL",
        "REQUIRED",
        "OUTPUT_ONLY",
        "INPUT_ONLY",
        "IMMUTABLE",
        "UNORDERED_LIST",
        "NON_EMPTY_DEFAULT",
        "IDENTIFIER",
    ];
    if (typeof object === "string") {
        const idx = names.indexOf(object);
        if (idx !== -1) {
            return idx;
        }
    }
    else if (globalThis.Number.isInteger(object) && object >= 0 && object < names.length) {
        return object;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
}
/**
 * Convert a FieldBehavior numeric value to its canonical JSON name string.
 * Throws for anything outside the exact integer range 0-8, mirroring
 * ts-proto's generated behavior.
 */
function fieldBehaviorToJSON(object) {
    // Index in this array == the enum's numeric value.
    const names = [
        "FIELD_BEHAVIOR_UNSPECIFIED",
        "OPTIONAL",
        "REQUIRED",
        "OUTPUT_ONLY",
        "INPUT_ONLY",
        "IMMUTABLE",
        "UNORDERED_LIST",
        "NON_EMPTY_DEFAULT",
        "IDENTIFIER",
    ];
    if (globalThis.Number.isInteger(object) && object >= 0 && object < names.length) {
        return names[object];
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
}

View File

@@ -0,0 +1,128 @@
/**
* `Any` contains an arbitrary serialized protocol buffer message along with a
* URL that describes the type of the serialized message.
*
* Protobuf library provides support to pack/unpack Any values in the form
* of utility functions or additional generated methods of the Any type.
*
* Example 1: Pack and unpack a message in C++.
*
* Foo foo = ...;
* Any any;
* any.PackFrom(foo);
* ...
* if (any.UnpackTo(&foo)) {
* ...
* }
*
* Example 2: Pack and unpack a message in Java.
*
* Foo foo = ...;
* Any any = Any.pack(foo);
* ...
* if (any.is(Foo.class)) {
* foo = any.unpack(Foo.class);
* }
* // or ...
* if (any.isSameTypeAs(Foo.getDefaultInstance())) {
* foo = any.unpack(Foo.getDefaultInstance());
* }
*
* Example 3: Pack and unpack a message in Python.
*
* foo = Foo(...)
* any = Any()
* any.Pack(foo)
* ...
* if any.Is(Foo.DESCRIPTOR):
* any.Unpack(foo)
* ...
*
* Example 4: Pack and unpack a message in Go
*
* foo := &pb.Foo{...}
* any, err := anypb.New(foo)
* if err != nil {
* ...
* }
* ...
* foo := &pb.Foo{}
* if err := any.UnmarshalTo(foo); err != nil {
* ...
* }
*
* The pack methods provided by protobuf library will by default use
* 'type.googleapis.com/full.type.name' as the type URL and the unpack
* methods only use the fully qualified type name after the last '/'
* in the type URL, for example "foo.bar.com/x/y.z" will yield type
* name "y.z".
*
* JSON
* ====
* The JSON representation of an `Any` value uses the regular
* representation of the deserialized, embedded message, with an
* additional field `@type` which contains the type URL. Example:
*
* package google.profile;
* message Person {
* string first_name = 1;
* string last_name = 2;
* }
*
* {
* "@type": "type.googleapis.com/google.profile.Person",
* "firstName": <string>,
* "lastName": <string>
* }
*
* If the embedded message type is well-known and has a custom JSON
* representation, that representation will be embedded adding a field
* `value` which holds the custom JSON in addition to the `@type`
* field. Example (for message [google.protobuf.Duration][]):
*
* {
* "@type": "type.googleapis.com/google.protobuf.Duration",
* "value": "1.212s"
* }
*/
export interface Any {
    /**
     * A URL/resource name that uniquely identifies the type of the serialized
     * protocol buffer message. This string must contain at least
     * one "/" character. The last segment of the URL's path must represent
     * the fully qualified name of the type (as in
     * `path/google.protobuf.Duration`). The name should be in a canonical form
     * (e.g., leading "." is not accepted).
     *
     * In practice, teams usually precompile into the binary all types that they
     * expect it to use in the context of Any. However, for URLs which use the
     * scheme `http`, `https`, or no scheme, one can optionally set up a type
     * server that maps type URLs to message definitions as follows:
     *
     * * If no scheme is provided, `https` is assumed.
     * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
     *   value in binary format, or produce an error.
     * * Applications are allowed to cache lookup results based on the
     *   URL, or have them precompiled into a binary to avoid any
     *   lookup. Therefore, binary compatibility needs to be preserved
     *   on changes to types. (Use versioned type names to manage
     *   breaking changes.)
     *
     * Note: this functionality is not currently available in the official
     * protobuf release, and it is not used for type URLs beginning with
     * type.googleapis.com. As of May 2023, there are no widely used type server
     * implementations and no plans to implement one.
     *
     * Schemes other than `http`, `https` (or the empty scheme) might be
     * used with implementation specific semantics.
     */
    typeUrl: string;
    /** Must be a valid serialized protocol buffer of the above specified type. */
    // NOTE: the codec in the generated any.js serializes this message with the
    // camelCase key `typeUrl` and base64-encodes `value` — NOT the canonical
    // Any JSON mapping with an "@type" field described above.
    value: Buffer;
}
/** JSON codec for Any, implemented in the generated runtime (.js) file. */
export declare const Any: MessageFns<Any>;
/** Common shape of the per-message JSON codecs emitted by ts-proto. */
interface MessageFns<T> {
    fromJSON(object: any): T;
    toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,35 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: google/protobuf/any.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Any = void 0;
/** JSON codec for google.protobuf.Any (non-canonical: typeUrl + base64 value keys). */
exports.Any = {
    fromJSON(object) {
        const typeUrl = isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "";
        const value = isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0);
        return { typeUrl, value };
    },
    toJSON(message) {
        const { typeUrl, value } = message;
        const obj = {};
        if (typeUrl !== "") {
            obj.typeUrl = typeUrl;
        }
        if (value.length !== 0) {
            obj.value = base64FromBytes(value);
        }
        return obj;
    },
};
/** Decode a base64 string into a freshly allocated Uint8Array. */
function bytesFromBase64(b64) {
    const decoded = globalThis.Buffer.from(b64, "base64");
    return new Uint8Array(decoded);
}
/** Encode a byte array as a base64 string. */
function base64FromBytes(arr) {
    const buf = globalThis.Buffer.from(arr);
    return buf.toString("base64");
}
/** True when `value` is neither null nor undefined (proto3 JSON field presence). */
function isSet(value) {
    return value != null;
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,112 @@
/**
* A Timestamp represents a point in time independent of any time zone or local
* calendar, encoded as a count of seconds and fractions of seconds at
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
* January 1, 1970, in the proleptic Gregorian calendar which extends the
* Gregorian calendar backwards to year one.
*
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
* second table is needed for interpretation, using a [24-hour linear
* smear](https://developers.google.com/time/smear).
*
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
* restricting to that range, we ensure that we can convert to and from [RFC
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
* Example 5: Compute Timestamp from Java `Instant.now()`.
*
* Instant now = Instant.now();
*
* Timestamp timestamp =
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
* .setNanos(now.getNano()).build();
*
* Example 6: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
* standard
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
* ) to obtain a formatter capable of generating timestamps in this format.
*/
export interface Timestamp {
    /**
     * Represents seconds of UTC time since Unix epoch
     * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
     * 9999-12-31T23:59:59Z inclusive.
     */
    // Held as a decimal string; the codec in the generated timestamp.js
    // defaults it to "0" and serializes {seconds, nanos} rather than the
    // canonical RFC 3339 string described above.
    seconds: string;
    /**
     * Non-negative fractions of a second at nanosecond resolution. Negative
     * second values with fractions must still have non-negative nanos values
     * that count forward in time. Must be from 0 to 999,999,999
     * inclusive.
     */
    nanos: number;
}
/** JSON codec for Timestamp, implemented in the generated runtime (.js) file. */
export declare const Timestamp: MessageFns<Timestamp>;
/** Common shape of the per-message JSON codecs emitted by ts-proto. */
interface MessageFns<T> {
    fromJSON(object: any): T;
    toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,29 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: google/protobuf/timestamp.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Timestamp = void 0;
/** JSON codec for google.protobuf.Timestamp ({seconds, nanos} object form). */
exports.Timestamp = {
    fromJSON(object) {
        const seconds = isSet(object.seconds) ? globalThis.String(object.seconds) : "0";
        const nanos = isSet(object.nanos) ? globalThis.Number(object.nanos) : 0;
        return { seconds, nanos };
    },
    toJSON(message) {
        const { seconds, nanos } = message;
        const obj = {};
        if (seconds !== "0") {
            obj.seconds = seconds;
        }
        if (nanos !== 0) {
            obj.nanos = Math.round(nanos);
        }
        return obj;
    },
};
/** True when `value` is neither null nor undefined (proto3 JSON field presence). */
function isSet(value) {
    return value != null;
}

View File

@@ -0,0 +1,23 @@
import { Envelope } from "../../envelope";
import { HashOutput } from "../../sigstore_common";
import { Signature, Verifier } from "./verifier";
/**
 * A request to add a DSSE v0.0.2 entry to the log.
 * (Generated from Rekor v2 `rekor/v2/dsse.proto`.)
 */
export interface DSSERequestV002 {
    /** A DSSE envelope */
    envelope: Envelope | undefined;
    /** All necessary verification material to verify all signatures embedded in the envelope */
    verifiers: Verifier[];
}
/** A DSSE v0.0.2 log entry as stored in the log. */
export interface DSSELogEntryV002 {
    /** The hash of the DSSE payload */
    payloadHash: HashOutput | undefined;
    /** Signatures and their associated verification material used to verify the payload */
    signatures: Signature[];
}
/** JSON codecs for the message types above, implemented in the generated runtime (.js) file. */
export declare const DSSERequestV002: MessageFns<DSSERequestV002>;
export declare const DSSELogEntryV002: MessageFns<DSSELogEntryV002>;
/** Common shape of the per-message JSON codecs emitted by ts-proto. */
interface MessageFns<T> {
    fromJSON(object: any): T;
    toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,55 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: rekor/v2/dsse.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
/* eslint-disable */
const envelope_1 = require("../../envelope");
const sigstore_common_1 = require("../../sigstore_common");
const verifier_1 = require("./verifier");
/** JSON codec for a DSSE v0.0.2 entry-creation request. */
exports.DSSERequestV002 = {
    fromJSON(object) {
        const envelope = isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined;
        const rawVerifiers = globalThis.Array.isArray(object?.verifiers) ? object.verifiers : [];
        return {
            envelope,
            verifiers: rawVerifiers.map((e) => verifier_1.Verifier.fromJSON(e)),
        };
    },
    toJSON(message) {
        const { envelope, verifiers } = message;
        const obj = {};
        if (envelope !== undefined) {
            obj.envelope = envelope_1.Envelope.toJSON(envelope);
        }
        if (verifiers?.length) {
            obj.verifiers = verifiers.map((e) => verifier_1.Verifier.toJSON(e));
        }
        return obj;
    },
};
/** JSON codec for a DSSE v0.0.2 log entry. */
exports.DSSELogEntryV002 = {
    fromJSON(object) {
        const payloadHash = isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined;
        const rawSignatures = globalThis.Array.isArray(object?.signatures) ? object.signatures : [];
        return {
            payloadHash,
            signatures: rawSignatures.map((e) => verifier_1.Signature.fromJSON(e)),
        };
    },
    toJSON(message) {
        const { payloadHash, signatures } = message;
        const obj = {};
        if (payloadHash !== undefined) {
            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(payloadHash);
        }
        if (signatures?.length) {
            obj.signatures = signatures.map((e) => verifier_1.Signature.toJSON(e));
        }
        return obj;
    },
};
/** True when `value` is neither null nor undefined (proto3 JSON field presence). */
function isSet(value) {
    return value != null;
}

View File

@@ -0,0 +1,43 @@
import { DSSELogEntryV002, DSSERequestV002 } from "./dsse";
import { HashedRekordLogEntryV002, HashedRekordRequestV002 } from "./hashedrekord";
/**
 * Entry is the message that is canonicalized and uploaded to the log.
 * This format is meant to be compliant with Rekor v1 entries in that
 * the `apiVersion` and `kind` can be parsed before parsing the spec.
 * Clients are expected to understand and handle the differences in the
 * contents of `spec` between Rekor v1 (a polymorphic OpenAPI definition)
 * and Rekor v2 (a typed proto definition).
 */
export interface Entry {
    kind: string;
    apiVersion: string;
    spec: Spec | undefined;
}
/**
 * Spec contains one of the Rekor entry types.
 * The oneof is represented by ts-proto as a `$case`-tagged union.
 */
export interface Spec {
    spec?: {
        $case: "hashedRekordV002";
        hashedRekordV002: HashedRekordLogEntryV002;
    } | {
        $case: "dsseV002";
        dsseV002: DSSELogEntryV002;
    } | undefined;
}
/**
 * Create a new HashedRekord or DSSE.
 * Exactly one request variant of the `$case`-tagged union is expected.
 */
export interface CreateEntryRequest {
    spec?: {
        $case: "hashedRekordRequestV002";
        hashedRekordRequestV002: HashedRekordRequestV002;
    } | {
        $case: "dsseRequestV002";
        dsseRequestV002: DSSERequestV002;
    } | undefined;
}
/** JSON codecs for the message types above, implemented in the generated runtime (.js) file. */
export declare const Entry: MessageFns<Entry>;
export declare const Spec: MessageFns<Spec>;
export declare const CreateEntryRequest: MessageFns<CreateEntryRequest>;
/** Common shape of the per-message JSON codecs emitted by ts-proto. */
interface MessageFns<T> {
    fromJSON(object: any): T;
    toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,81 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: rekor/v2/entry.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
/* eslint-disable */
const dsse_1 = require("./dsse");
const hashedrekord_1 = require("./hashedrekord");
exports.Entry = {
fromJSON(object) {
return {
kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.kind !== "") {
obj.kind = message.kind;
}
if (message.apiVersion !== "") {
obj.apiVersion = message.apiVersion;
}
if (message.spec !== undefined) {
obj.spec = exports.Spec.toJSON(message.spec);
}
return obj;
},
};
exports.Spec = {
fromJSON(object) {
return {
spec: isSet(object.hashedRekordV002)
? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
: isSet(object.dsseV002)
? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
: undefined,
};
},
toJSON(message) {
const obj = {};
if (message.spec?.$case === "hashedRekordV002") {
obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
}
else if (message.spec?.$case === "dsseV002") {
obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
}
return obj;
},
};
exports.CreateEntryRequest = {
fromJSON(object) {
return {
spec: isSet(object.hashedRekordRequestV002)
? {
$case: "hashedRekordRequestV002",
hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
}
: isSet(object.dsseRequestV002)
? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
: undefined,
};
},
toJSON(message) {
const obj = {};
if (message.spec?.$case === "hashedRekordRequestV002") {
obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
}
else if (message.spec?.$case === "dsseRequestV002") {
obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
}
return obj;
},
};
/** Presence check for a JSON field: true unless the value is null/undefined. */
function isSet(value) {
    // Loose `!= null` matches exactly null and undefined, nothing else.
    return value != null;
}

View File

@@ -0,0 +1,22 @@
import { HashOutput } from "../../sigstore_common";
import { Signature } from "./verifier";
/** A request to add a hashedrekord v0.0.2 to the log */
export interface HashedRekordRequestV002 {
/** The hashed data */
digest: Buffer;
/** A single signature over the hashed data with the verifier needed to validate it */
signature: Signature | undefined;
}
export interface HashedRekordLogEntryV002 {
/** The hashed data */
data: HashOutput | undefined;
/** A single signature over the hashed data with the verifier needed to validate it */
signature: Signature | undefined;
}
export declare const HashedRekordRequestV002: MessageFns<HashedRekordRequestV002>;
export declare const HashedRekordLogEntryV002: MessageFns<HashedRekordLogEntryV002>;
interface MessageFns<T> {
fromJSON(object: any): T;
toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,56 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: rekor/v2/hashedrekord.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
/* eslint-disable */
const sigstore_common_1 = require("../../sigstore_common");
const verifier_1 = require("./verifier");
exports.HashedRekordRequestV002 = {
fromJSON(object) {
return {
digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.digest.length !== 0) {
obj.digest = base64FromBytes(message.digest);
}
if (message.signature !== undefined) {
obj.signature = verifier_1.Signature.toJSON(message.signature);
}
return obj;
},
};
exports.HashedRekordLogEntryV002 = {
fromJSON(object) {
return {
data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.data !== undefined) {
obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
}
if (message.signature !== undefined) {
obj.signature = verifier_1.Signature.toJSON(message.signature);
}
return obj;
},
};
/** Decodes a base64 string into a plain Uint8Array. */
function bytesFromBase64(b64) {
    // Decode via Node's Buffer, then copy into a plain Uint8Array.
    const decoded = globalThis.Buffer.from(b64, "base64");
    return Uint8Array.from(decoded);
}
/** Encodes a byte array as a base64 string. */
function base64FromBytes(bytes) {
    // Wrap the raw bytes in a Buffer and let Node perform the base64 encoding.
    const wrapped = globalThis.Buffer.from(bytes);
    return wrapped.toString("base64");
}
/** Presence check for a JSON field: true unless the value is null/undefined. */
function isSet(value) {
    // Loose `!= null` matches exactly null and undefined, nothing else.
    return value != null;
}

View File

@@ -0,0 +1,35 @@
import { PublicKeyDetails, X509Certificate } from "../../sigstore_common";
/** PublicKey contains an encoded public key */
export interface PublicKey {
/** DER-encoded public key */
rawBytes: Buffer;
}
/** Either a public key or an X.509 certificate with an embedded public key */
export interface Verifier {
verifier?: //
/** DER-encoded public key. Encoding method is specified by the key_details attribute */
{
$case: "publicKey";
publicKey: PublicKey;
} | //
/** DER-encoded certificate */
{
$case: "x509Certificate";
x509Certificate: X509Certificate;
} | undefined;
/** Key encoding and signature algorithm to use for this key */
keyDetails: PublicKeyDetails;
}
/** A signature and an associated verifier */
export interface Signature {
    /** The raw signature bytes. */
    content: Buffer;
    /** The key material (public key or X.509 certificate) used to validate `content`. */
    verifier: Verifier | undefined;
}
export declare const PublicKey: MessageFns<PublicKey>;
export declare const Verifier: MessageFns<Verifier>;
export declare const Signature: MessageFns<Signature>;
interface MessageFns<T> {
fromJSON(object: any): T;
toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,74 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: rekor/v2/verifier.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Signature = exports.Verifier = exports.PublicKey = void 0;
/* eslint-disable */
const sigstore_common_1 = require("../../sigstore_common");
exports.PublicKey = {
fromJSON(object) {
return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
},
toJSON(message) {
const obj = {};
if (message.rawBytes.length !== 0) {
obj.rawBytes = base64FromBytes(message.rawBytes);
}
return obj;
},
};
exports.Verifier = {
fromJSON(object) {
return {
verifier: isSet(object.publicKey)
? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
: isSet(object.x509Certificate)
? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
: undefined,
keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
};
},
toJSON(message) {
const obj = {};
if (message.verifier?.$case === "publicKey") {
obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
}
else if (message.verifier?.$case === "x509Certificate") {
obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
}
if (message.keyDetails !== 0) {
obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
}
return obj;
},
};
exports.Signature = {
fromJSON(object) {
return {
content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.content.length !== 0) {
obj.content = base64FromBytes(message.content);
}
if (message.verifier !== undefined) {
obj.verifier = exports.Verifier.toJSON(message.verifier);
}
return obj;
},
};
/** Decodes a base64 string into a plain Uint8Array. */
function bytesFromBase64(b64) {
    // Decode via Node's Buffer, then copy into a plain Uint8Array.
    const decoded = globalThis.Buffer.from(b64, "base64");
    return Uint8Array.from(decoded);
}
/** Encodes a byte array as a base64 string. */
function base64FromBytes(bytes) {
    // Wrap the raw bytes in a Buffer and let Node perform the base64 encoding.
    const wrapped = globalThis.Buffer.from(bytes);
    return wrapped.toString("base64");
}
/** Presence check for a JSON field: true unless the value is null/undefined. */
function isSet(value) {
    // Loose `!= null` matches exactly null and undefined, nothing else.
    return value != null;
}

View File

@@ -0,0 +1,146 @@
import { Envelope } from "./envelope";
import { MessageSignature, PublicKeyIdentifier, RFC3161SignedTimestamp, X509Certificate, X509CertificateChain } from "./sigstore_common";
import { TransparencyLogEntry } from "./sigstore_rekor";
/**
* Various timestamped counter signatures over the artifacts signature.
* Currently only RFC3161 signatures are provided. More formats may be added
* in the future.
*/
export interface TimestampVerificationData {
/**
* A list of RFC3161 signed timestamps provided by the user.
* This can be used when the entry has not been stored on a
* transparency log, or in conjunction for a stronger trust model.
* Clients MUST verify the hashed message in the message imprint
* against the signature in the bundle.
*/
rfc3161Timestamps: RFC3161SignedTimestamp[];
}
/**
* VerificationMaterial captures details on the materials used to verify
* signatures. This message may be embedded in a DSSE envelope as a signature
* extension. Specifically, the `ext` field of the extension will expect this
* message when the signature extension is for Sigstore. This is identified by
* the `kind` field in the extension, which must be set to
* application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore.
* When used as a DSSE extension, if the `public_key` field is used to indicate
* the key identifier, it MUST match the `keyid` field of the signature the
* extension is attached to.
*/
export interface VerificationMaterial {
/**
* The key material for verification purposes.
*
* This allows key material to be conveyed in one of three forms:
*
* 1. An unspecified public key identifier, for retrieving a key
* from an out-of-band mechanism (such as a keyring);
*
* 2. A sequence of one or more X.509 certificates, of which the first member
* MUST be a leaf certificate conveying the signing key. Subsequent members
* SHOULD be in issuing order, meaning that `n + 1` should be an issuer for `n`.
*
* Signers MUST NOT include root CA certificates in bundles, and SHOULD NOT
* include intermediate CA certificates that appear in an independent root of trust
* (such as the Public Good Instance's trusted root).
*
* Verifiers MUST validate the chain carefully to ensure that it chains up
* to a CA certificate that they independently trust. Verifiers SHOULD
* handle old or non-complying bundles that have superfluous intermediate and/or
* root CA certificates by either ignoring them or explicitly considering them
* untrusted for the purposes of chain building.
*
* 3. A single X.509 certificate, which MUST be a leaf certificate conveying
* the signing key.
*
* When used with the Public Good Instance (PGI) of Sigstore for "keyless" signing
* via Fulcio, form (1) MUST NOT be used, regardless of bundle version. Form (1)
* MAY be used with the PGI for self-managed keys.
*
* When used in a `0.1` or `0.2` bundle with the PGI and "keyless" signing,
* form (2) MUST be used.
*
* When used in a `0.3` bundle with the PGI and "keyless" signing,
* form (3) MUST be used.
*/
content?: {
$case: "publicKey";
publicKey: PublicKeyIdentifier;
} | {
$case: "x509CertificateChain";
x509CertificateChain: X509CertificateChain;
} | {
$case: "certificate";
certificate: X509Certificate;
} | undefined;
/**
* An inclusion proof and an optional signed timestamp from the log.
* Client verification libraries MAY provide an option to support v0.1
* bundles for backwards compatibility, which may contain an inclusion
* promise and not an inclusion proof. In this case, the client MUST
* validate the promise.
* Verifiers SHOULD NOT allow v0.1 bundles if they're used in an
* ecosystem which never produced them.
*/
tlogEntries: TransparencyLogEntry[];
/**
* Timestamp may also come from
* tlog_entries.inclusion_promise.signed_entry_timestamp.
*/
timestampVerificationData: TimestampVerificationData | undefined;
}
export interface Bundle {
    /**
     * MUST be application/vnd.dev.sigstore.bundle.v0.3+json
     * when encoded as JSON.
     * Clients must be able to accept media type using the previously
     * defined formats:
     * * application/vnd.dev.sigstore.bundle+json;version=0.1
     * * application/vnd.dev.sigstore.bundle+json;version=0.2
     * * application/vnd.dev.sigstore.bundle+json;version=0.3
     */
    mediaType: string;
    /**
     * When a signer is identified by a X.509 certificate, a verifier MUST
     * verify that the signature was computed at the time the certificate
     * was valid as described in the Sigstore client spec: "Verification
     * using a Bundle".
     * <https://docs.google.com/document/d/1kbhK2qyPPk8SLavHzYSDM8-Ueul9_oxIMVFuWMWKz0E/edit#heading=h.x8bduppe89ln>
     * If the verification material contains a public key identifier
     * (key hint) and the `content` is a DSSE envelope, the key hints
     * MUST be exactly the same in the verification material and in the
     * DSSE envelope.
     */
    verificationMaterial: VerificationMaterial | undefined;
    /** The signed artifact: either a raw message signature or a DSSE envelope. */
    content?: {
        $case: "messageSignature";
        messageSignature: MessageSignature;
    } | //
    /**
     * A DSSE envelope can contain arbitrary payloads.
     * Verifiers must verify that the payload type is a
     * supported and expected type. This is part of the DSSE
     * protocol which is defined here:
     * <https://github.com/secure-systems-lab/dsse/blob/master/protocol.md>
     * DSSE envelopes in a bundle MUST have exactly one signature.
     * This is a limitation from the DSSE spec, as it can contain
     * multiple signatures. There are two primary reasons:
     * 1. It simplifies the verification logic and policy
     * 2. The bundle (currently) can only contain a single
     *    instance of the required verification materials
     * During verification a client MUST reject an envelope if
     * the number of signatures is not equal to one.
     */
    {
        $case: "dsseEnvelope";
        dsseEnvelope: Envelope;
    } | undefined;
}
export declare const TimestampVerificationData: MessageFns<TimestampVerificationData>;
export declare const VerificationMaterial: MessageFns<VerificationMaterial>;
export declare const Bundle: MessageFns<Bundle>;
interface MessageFns<T> {
fromJSON(object: any): T;
toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,103 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: sigstore_bundle.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
/* eslint-disable */
const envelope_1 = require("./envelope");
const sigstore_common_1 = require("./sigstore_common");
const sigstore_rekor_1 = require("./sigstore_rekor");
exports.TimestampVerificationData = {
fromJSON(object) {
return {
rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
if (message.rfc3161Timestamps?.length) {
obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
}
return obj;
},
};
// JSON codec for `VerificationMaterial` (generated by protoc-gen-ts_proto).
exports.VerificationMaterial = {
    fromJSON(object) {
        return {
            // `content` oneof: candidate JSON fields are probed in declaration
            // order; the first one present wins.
            content: isSet(object.publicKey)
                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
                : isSet(object.x509CertificateChain)
                    ? {
                        $case: "x509CertificateChain",
                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
                    }
                    : isSet(object.certificate)
                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
                        : undefined,
            // Repeated field: absent or non-array input yields an empty list.
            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
                : [],
            timestampVerificationData: isSet(object.timestampVerificationData)
                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
                : undefined,
        };
    },
    toJSON(message) {
        const obj = {};
        // Emit whichever oneof case is set under its JSON field name.
        if (message.content?.$case === "publicKey") {
            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
        }
        else if (message.content?.$case === "x509CertificateChain") {
            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
        }
        else if (message.content?.$case === "certificate") {
            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
        }
        // Empty lists and unset sub-messages are omitted (proto3 JSON defaults).
        if (message.tlogEntries?.length) {
            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
        }
        if (message.timestampVerificationData !== undefined) {
            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
        }
        return obj;
    },
};
// JSON codec for `Bundle` (generated by protoc-gen-ts_proto).
exports.Bundle = {
    fromJSON(object) {
        return {
            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
            verificationMaterial: isSet(object.verificationMaterial)
                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
                : undefined,
            // `content` oneof: messageSignature is probed before dsseEnvelope;
            // the first field present wins.
            content: isSet(object.messageSignature)
                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
                : isSet(object.dsseEnvelope)
                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
                    : undefined,
        };
    },
    toJSON(message) {
        const obj = {};
        // Default (empty/unset) fields are omitted per proto3 JSON rules.
        if (message.mediaType !== "") {
            obj.mediaType = message.mediaType;
        }
        if (message.verificationMaterial !== undefined) {
            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
        }
        // Emit whichever oneof case is set under its JSON field name.
        if (message.content?.$case === "messageSignature") {
            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
        }
        else if (message.content?.$case === "dsseEnvelope") {
            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
        }
        return obj;
    },
};
/** Presence check for a JSON field: true unless the value is null/undefined. */
function isSet(value) {
    // Loose `!= null` matches exactly null and undefined, nothing else.
    return value != null;
}

View File

@@ -0,0 +1,293 @@
/**
* Only a subset of the secure hash standard algorithms are supported.
* See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
* details.
* UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
* any proto JSON serialization to emit the used hash algorithm, as default
* option is to *omit* the default value of an enum (which is the first
 * value, represented by '0').
*/
export declare enum HashAlgorithm {
HASH_ALGORITHM_UNSPECIFIED = 0,
SHA2_256 = 1,
SHA2_384 = 2,
SHA2_512 = 3,
SHA3_256 = 4,
SHA3_384 = 5
}
export declare function hashAlgorithmFromJSON(object: any): HashAlgorithm;
export declare function hashAlgorithmToJSON(object: HashAlgorithm): string;
/**
 * Details of a specific public key, capturing the key encoding method,
* and signature algorithm.
*
* PublicKeyDetails captures the public key/hash algorithm combinations
* recommended in the Sigstore ecosystem.
*
* This is modelled as a linear set as we want to provide a small number of
* opinionated options instead of allowing every possible permutation.
*
* Any changes to this enum MUST be reflected in the algorithm registry.
*
* See: <https://github.com/sigstore/architecture-docs/blob/main/algorithm-registry.md>
*
* To avoid the possibility of contradicting formats such as PKCS1 with
* ED25519 the valid permutations are listed as a linear set instead of a
* cartesian set (i.e one combined variable instead of two, one for encoding
* and one for the signature algorithm).
*/
export declare enum PublicKeyDetails {
PUBLIC_KEY_DETAILS_UNSPECIFIED = 0,
/**
* PKCS1_RSA_PKCS1V5 - RSA
*
* @deprecated
*/
PKCS1_RSA_PKCS1V5 = 1,
/**
* PKCS1_RSA_PSS - See RFC8017
*
* @deprecated
*/
PKCS1_RSA_PSS = 2,
/** @deprecated */
PKIX_RSA_PKCS1V5 = 3,
/** @deprecated */
PKIX_RSA_PSS = 4,
/** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
PKIX_RSA_PKCS1V15_2048_SHA256 = 9,
PKIX_RSA_PKCS1V15_3072_SHA256 = 10,
PKIX_RSA_PKCS1V15_4096_SHA256 = 11,
/** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
PKIX_RSA_PSS_2048_SHA256 = 16,
PKIX_RSA_PSS_3072_SHA256 = 17,
PKIX_RSA_PSS_4096_SHA256 = 18,
/**
* PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
*
* @deprecated
*/
PKIX_ECDSA_P256_HMAC_SHA_256 = 6,
/** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
PKIX_ECDSA_P256_SHA_256 = 5,
PKIX_ECDSA_P384_SHA_384 = 12,
PKIX_ECDSA_P521_SHA_512 = 13,
/** PKIX_ED25519 - Ed 25519 */
PKIX_ED25519 = 7,
PKIX_ED25519_PH = 8,
/**
* PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
* were/are being used by most Sigstore clients implementations.
*
* @deprecated
*/
PKIX_ECDSA_P384_SHA_256 = 19,
/** @deprecated */
PKIX_ECDSA_P521_SHA_256 = 20,
/**
* LMS_SHA256 - LMS and LM-OTS
*
* These algorithms are deprecated and should not be used.
* Keys and signatures MAY be used by private Sigstore
* deployments, but will not be supported by the public
* good instance.
*
* USER WARNING: LMS and LM-OTS are both stateful signature schemes.
* Using them correctly requires discretion and careful consideration
* to ensure that individual secret keys are not used more than once.
* In addition, LM-OTS is a single-use scheme, meaning that it
* MUST NOT be used for more than one signature per LM-OTS key.
* If you cannot maintain these invariants, you MUST NOT use these
* schemes.
*
* @deprecated
*/
LMS_SHA256 = 14,
/** @deprecated */
LMOTS_SHA256 = 15,
/**
* ML_DSA_65 - ML-DSA
*
* These ML_DSA_65 and ML-DSA_87 algorithms are the pure variants that
* take data to sign rather than the prehash variants (HashML-DSA), which
* take digests. While considered quantum-resistant, their usage
* involves tradeoffs in that signatures and keys are much larger, and
* this makes deployments more costly.
*
* USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
* In the future they MAY be used by private Sigstore deployments, but
* they are not yet fully functional. This warning will be removed when
* these algorithms are widely supported by Sigstore clients and servers,
* but care should still be taken for production environments.
*/
ML_DSA_65 = 21,
ML_DSA_87 = 22
}
export declare function publicKeyDetailsFromJSON(object: any): PublicKeyDetails;
export declare function publicKeyDetailsToJSON(object: PublicKeyDetails): string;
export declare enum SubjectAlternativeNameType {
SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED = 0,
EMAIL = 1,
URI = 2,
/**
* OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
* See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
* for more details.
*/
OTHER_NAME = 3
}
export declare function subjectAlternativeNameTypeFromJSON(object: any): SubjectAlternativeNameType;
export declare function subjectAlternativeNameTypeToJSON(object: SubjectAlternativeNameType): string;
/**
* HashOutput captures a digest of a 'message' (generic octet sequence)
* and the corresponding hash algorithm used.
*/
export interface HashOutput {
algorithm: HashAlgorithm;
/**
* This is the raw octets of the message digest as computed by
* the hash algorithm.
*/
digest: Buffer;
}
/** MessageSignature stores the computed signature over a message. */
export interface MessageSignature {
/**
* Message digest can be used to identify the artifact.
* Clients MUST NOT attempt to use this digest to verify the associated
* signature; it is intended solely for identification.
*/
messageDigest: HashOutput | undefined;
/**
* The raw bytes as returned from the signature algorithm.
* The signature algorithm (and so the format of the signature bytes)
* are determined by the contents of the 'verification_material',
* either a key-pair or a certificate. If using a certificate, the
* certificate contains the required information on the signature
* algorithm.
* When using a key pair, the algorithm MUST be part of the public
* key, which MUST be communicated out-of-band.
*/
signature: Buffer;
}
/** LogId captures the identity of a transparency log. */
export interface LogId {
/** The unique identity of the log, represented by its public key. */
keyId: Buffer;
}
/** This message holds a RFC 3161 timestamp. */
export interface RFC3161SignedTimestamp {
/**
* Signed timestamp is the DER encoded TimeStampResponse.
* See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2
*/
signedTimestamp: Buffer;
}
export interface PublicKey {
/**
* DER-encoded public key, encoding method is specified by the
* key_details attribute.
*/
rawBytes?: Buffer | undefined;
/** Key encoding and signature algorithm to use for this key. */
keyDetails: PublicKeyDetails;
/** Optional validity period for this key, *inclusive* of the endpoints. */
validFor?: TimeRange | undefined;
}
/**
* PublicKeyIdentifier can be used to identify an (out of band) delivered
* key, to verify a signature.
*/
export interface PublicKeyIdentifier {
/**
* Optional unauthenticated hint on which key to use.
* The format of the hint must be agreed upon out of band by the
* signer and the verifiers, and so is not subject to this
* specification.
* Example use-case is to specify the public key to use, from a
* trusted key-ring.
* Implementors are RECOMMENDED to derive the value from the public
* key as described in RFC 6962.
* See: <https://www.rfc-editor.org/rfc/rfc6962#section-3.2>
*/
hint: string;
}
/** An ASN.1 OBJECT IDENTIFIER */
export interface ObjectIdentifier {
id: number[];
}
/** An OID and the corresponding (byte) value. */
export interface ObjectIdentifierValuePair {
oid: ObjectIdentifier | undefined;
value: Buffer;
}
export interface DistinguishedName {
organization: string;
commonName: string;
}
export interface X509Certificate {
/** DER-encoded X.509 certificate. */
rawBytes: Buffer;
}
export interface SubjectAlternativeName {
type: SubjectAlternativeNameType;
identity?: //
/**
* A regular expression describing the expected value for
* the SAN.
*/
{
$case: "regexp";
regexp: string;
} | //
/** The exact value to match against. */
{
$case: "value";
value: string;
} | undefined;
}
/**
* A collection of X.509 certificates.
*
* This "chain" can be used in multiple contexts, such as providing a root CA
* certificate within a TUF root of trust or multiple untrusted certificates for
* the purpose of chain building.
*/
export interface X509CertificateChain {
/**
* One or more DER-encoded certificates.
*
* In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence
* has an imposed order. Unless explicitly specified, there is otherwise no
* guaranteed order.
*/
certificates: X509Certificate[];
}
/**
* The time range is closed and includes both the start and end times,
* (i.e., [start, end]).
* End is optional to be able to capture a period that has started but
* has no known end.
*/
export interface TimeRange {
start: Date | undefined;
end?: Date | undefined;
}
export declare const HashOutput: MessageFns<HashOutput>;
export declare const MessageSignature: MessageFns<MessageSignature>;
export declare const LogId: MessageFns<LogId>;
export declare const RFC3161SignedTimestamp: MessageFns<RFC3161SignedTimestamp>;
export declare const PublicKey: MessageFns<PublicKey>;
export declare const PublicKeyIdentifier: MessageFns<PublicKeyIdentifier>;
export declare const ObjectIdentifier: MessageFns<ObjectIdentifier>;
export declare const ObjectIdentifierValuePair: MessageFns<ObjectIdentifierValuePair>;
export declare const DistinguishedName: MessageFns<DistinguishedName>;
export declare const X509Certificate: MessageFns<X509Certificate>;
export declare const SubjectAlternativeName: MessageFns<SubjectAlternativeName>;
export declare const X509CertificateChain: MessageFns<X509CertificateChain>;
export declare const TimeRange: MessageFns<TimeRange>;
interface MessageFns<T> {
fromJSON(object: any): T;
toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,596 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: sigstore_common.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
/* eslint-disable */
const timestamp_1 = require("./google/protobuf/timestamp");
/**
* Only a subset of the secure hash standard algorithms are supported.
* See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
* details.
* UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
* any proto JSON serialization to emit the used hash algorithm, as default
* option is to *omit* the default value of an enum (which is the first
 * value, represented by '0').
*/
var HashAlgorithm;
(function (HashAlgorithm) {
    // TypeScript numeric enums carry a bidirectional mapping:
    // name -> value and value -> name are both installed.
    const members = [
        ["HASH_ALGORITHM_UNSPECIFIED", 0],
        ["SHA2_256", 1],
        ["SHA2_384", 2],
        ["SHA2_512", 3],
        ["SHA3_256", 4],
        ["SHA3_384", 5],
    ];
    for (const [name, value] of members) {
        HashAlgorithm[HashAlgorithm[name] = value] = name;
    }
})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
/**
 * Parses a proto3 JSON enum value (numeric value or string name) into a
 * HashAlgorithm. Throws for any unrecognized input.
 */
function hashAlgorithmFromJSON(object) {
    if (object === 0 || object === "HASH_ALGORITHM_UNSPECIFIED")
        return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
    if (object === 1 || object === "SHA2_256")
        return HashAlgorithm.SHA2_256;
    if (object === 2 || object === "SHA2_384")
        return HashAlgorithm.SHA2_384;
    if (object === 3 || object === "SHA2_512")
        return HashAlgorithm.SHA2_512;
    if (object === 4 || object === "SHA3_256")
        return HashAlgorithm.SHA3_256;
    if (object === 5 || object === "SHA3_384")
        return HashAlgorithm.SHA3_384;
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
}
/**
 * Renders a HashAlgorithm as its canonical proto3 JSON string name.
 * Throws for any value outside the enum.
 */
function hashAlgorithmToJSON(object) {
    if (object === HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED)
        return "HASH_ALGORITHM_UNSPECIFIED";
    if (object === HashAlgorithm.SHA2_256)
        return "SHA2_256";
    if (object === HashAlgorithm.SHA2_384)
        return "SHA2_384";
    if (object === HashAlgorithm.SHA2_512)
        return "SHA2_512";
    if (object === HashAlgorithm.SHA3_256)
        return "SHA3_256";
    if (object === HashAlgorithm.SHA3_384)
        return "SHA3_384";
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
}
/**
 * Details of a specific public key, capturing the key encoding method,
* and signature algorithm.
*
* PublicKeyDetails captures the public key/hash algorithm combinations
* recommended in the Sigstore ecosystem.
*
* This is modelled as a linear set as we want to provide a small number of
* opinionated options instead of allowing every possible permutation.
*
* Any changes to this enum MUST be reflected in the algorithm registry.
*
* See: <https://github.com/sigstore/architecture-docs/blob/main/algorithm-registry.md>
*
* To avoid the possibility of contradicting formats such as PKCS1 with
* ED25519 the valid permutations are listed as a linear set instead of a
* cartesian set (i.e one combined variable instead of two, one for encoding
* and one for the signature algorithm).
*/
var PublicKeyDetails;
(function (PublicKeyDetails) {
    /*
     * name -> numeric wire value, listed in registry order. Members marked
     * "@deprecated" are retained only for compatibility with material
     * produced by older clients; see the algorithm registry linked above.
     */
    const members = {
        PUBLIC_KEY_DETAILS_UNSPECIFIED: 0,
        // RSA -- @deprecated
        PKCS1_RSA_PKCS1V5: 1,
        // See RFC8017 -- @deprecated
        PKCS1_RSA_PSS: 2,
        // @deprecated
        PKIX_RSA_PKCS1V5: 3,
        // @deprecated
        PKIX_RSA_PSS: 4,
        // RSA public key in PKIX format, PKCS#1v1.5 signature
        PKIX_RSA_PKCS1V15_2048_SHA256: 9,
        PKIX_RSA_PKCS1V15_3072_SHA256: 10,
        PKIX_RSA_PKCS1V15_4096_SHA256: 11,
        // RSA public key in PKIX format, RSASSA-PSS signature
        PKIX_RSA_PSS_2048_SHA256: 16,
        PKIX_RSA_PSS_3072_SHA256: 17,
        PKIX_RSA_PSS_4096_SHA256: 18,
        // ECDSA -- @deprecated
        PKIX_ECDSA_P256_HMAC_SHA_256: 6,
        // See NIST FIPS 186-4
        PKIX_ECDSA_P256_SHA_256: 5,
        PKIX_ECDSA_P384_SHA_384: 12,
        PKIX_ECDSA_P521_SHA_512: 13,
        // Ed 25519
        PKIX_ED25519: 7,
        PKIX_ED25519_PH: 8,
        // Deprecated ECDSA curve/digest pairings that were/are used by most
        // Sigstore client implementations. -- @deprecated
        PKIX_ECDSA_P384_SHA_256: 19,
        // @deprecated
        PKIX_ECDSA_P521_SHA_256: 20,
        // LMS and LM-OTS -- @deprecated. Stateful schemes: individual secret
        // keys must never be reused, and LM-OTS keys are strictly single-use.
        // MAY appear in private deployments; not supported by the public good
        // instance. See the proto definition for the full user warning.
        LMS_SHA256: 14,
        // @deprecated
        LMOTS_SHA256: 15,
        // ML-DSA (pure variants that sign the data itself, not the prehash
        // HashML-DSA variants). USER WARNING: experimental; signatures and
        // keys are much larger, and client/server support is not yet wide.
        ML_DSA_65: 21,
        ML_DSA_87: 22,
    };
    // Install both forward (name -> value) and reverse (value -> name)
    // mappings, mirroring what a TypeScript numeric enum compiles to.
    for (const name of Object.keys(members)) {
        PublicKeyDetails[PublicKeyDetails[name] = members[name]] = name;
    }
})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
/**
 * Decodes a PublicKeyDetails from its proto3 JSON form, which may be either
 * the numeric enum value or the canonical string name.
 * @throws {Error} when the value is not a known PublicKeyDetails member.
 */
function publicKeyDetailsFromJSON(object) {
    // The compiled enum carries both forward (name -> number) and reverse
    // (number -> name) mappings, so membership can be tested either way.
    if (typeof object === "number" && typeof PublicKeyDetails[object] === "string") {
        return object;
    }
    if (typeof object === "string" && typeof PublicKeyDetails[object] === "number") {
        return PublicKeyDetails[object];
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
}
/**
 * Encodes a PublicKeyDetails enum value as its canonical string name for
 * proto3 JSON output.
 * @throws {Error} when the value is not a known PublicKeyDetails member.
 */
function publicKeyDetailsToJSON(object) {
    // Only numeric members have a string entry in the reverse mapping;
    // anything else is rejected, matching the generated switch behavior.
    const name = PublicKeyDetails[object];
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
}
var SubjectAlternativeNameType;
(function (SubjectAlternativeNameType) {
    // name -> numeric wire value for the SAN type oneof selector.
    const members = {
        SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED: 0,
        EMAIL: 1,
        URI: 2,
        // OID 1.3.6.1.4.1.57264.1.7
        // See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
        // for more details.
        OTHER_NAME: 3,
    };
    // Install both forward (name -> value) and reverse (value -> name)
    // mappings, mirroring what a TypeScript numeric enum compiles to.
    for (const name of Object.keys(members)) {
        SubjectAlternativeNameType[SubjectAlternativeNameType[name] = members[name]] = name;
    }
})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
/**
 * Decodes a SubjectAlternativeNameType from its proto3 JSON form (numeric
 * enum value or canonical string name).
 * @throws {Error} when the value is not a known member.
 */
function subjectAlternativeNameTypeFromJSON(object) {
    // Membership is checked via the enum's bidirectional mapping.
    if (typeof object === "number" && typeof SubjectAlternativeNameType[object] === "string") {
        return object;
    }
    if (typeof object === "string" && typeof SubjectAlternativeNameType[object] === "number") {
        return SubjectAlternativeNameType[object];
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
}
/**
 * Encodes a SubjectAlternativeNameType enum value as its canonical string
 * name for proto3 JSON output.
 * @throws {Error} when the value is not a known member.
 */
function subjectAlternativeNameTypeToJSON(object) {
    const name = SubjectAlternativeNameType[object];
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
}
exports.HashOutput = {
fromJSON(object) {
return {
algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
};
},
toJSON(message) {
const obj = {};
if (message.algorithm !== 0) {
obj.algorithm = hashAlgorithmToJSON(message.algorithm);
}
if (message.digest.length !== 0) {
obj.digest = base64FromBytes(message.digest);
}
return obj;
},
};
exports.MessageSignature = {
fromJSON(object) {
return {
messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
};
},
toJSON(message) {
const obj = {};
if (message.messageDigest !== undefined) {
obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
}
if (message.signature.length !== 0) {
obj.signature = base64FromBytes(message.signature);
}
return obj;
},
};
exports.LogId = {
fromJSON(object) {
return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
},
toJSON(message) {
const obj = {};
if (message.keyId.length !== 0) {
obj.keyId = base64FromBytes(message.keyId);
}
return obj;
},
};
exports.RFC3161SignedTimestamp = {
fromJSON(object) {
return {
signedTimestamp: isSet(object.signedTimestamp)
? Buffer.from(bytesFromBase64(object.signedTimestamp))
: Buffer.alloc(0),
};
},
toJSON(message) {
const obj = {};
if (message.signedTimestamp.length !== 0) {
obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
}
return obj;
},
};
exports.PublicKey = {
fromJSON(object) {
return {
rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.rawBytes !== undefined) {
obj.rawBytes = base64FromBytes(message.rawBytes);
}
if (message.keyDetails !== 0) {
obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
}
if (message.validFor !== undefined) {
obj.validFor = exports.TimeRange.toJSON(message.validFor);
}
return obj;
},
};
exports.PublicKeyIdentifier = {
fromJSON(object) {
return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
},
toJSON(message) {
const obj = {};
if (message.hint !== "") {
obj.hint = message.hint;
}
return obj;
},
};
exports.ObjectIdentifier = {
fromJSON(object) {
return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
},
toJSON(message) {
const obj = {};
if (message.id?.length) {
obj.id = message.id.map((e) => Math.round(e));
}
return obj;
},
};
exports.ObjectIdentifierValuePair = {
fromJSON(object) {
return {
oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
};
},
toJSON(message) {
const obj = {};
if (message.oid !== undefined) {
obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
}
if (message.value.length !== 0) {
obj.value = base64FromBytes(message.value);
}
return obj;
},
};
exports.DistinguishedName = {
fromJSON(object) {
return {
organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
};
},
toJSON(message) {
const obj = {};
if (message.organization !== "") {
obj.organization = message.organization;
}
if (message.commonName !== "") {
obj.commonName = message.commonName;
}
return obj;
},
};
exports.X509Certificate = {
fromJSON(object) {
return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
},
toJSON(message) {
const obj = {};
if (message.rawBytes.length !== 0) {
obj.rawBytes = base64FromBytes(message.rawBytes);
}
return obj;
},
};
exports.SubjectAlternativeName = {
fromJSON(object) {
return {
type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
identity: isSet(object.regexp)
? { $case: "regexp", regexp: globalThis.String(object.regexp) }
: isSet(object.value)
? { $case: "value", value: globalThis.String(object.value) }
: undefined,
};
},
toJSON(message) {
const obj = {};
if (message.type !== 0) {
obj.type = subjectAlternativeNameTypeToJSON(message.type);
}
if (message.identity?.$case === "regexp") {
obj.regexp = message.identity.regexp;
}
else if (message.identity?.$case === "value") {
obj.value = message.identity.value;
}
return obj;
},
};
exports.X509CertificateChain = {
fromJSON(object) {
return {
certificates: globalThis.Array.isArray(object?.certificates)
? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
if (message.certificates?.length) {
obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
}
return obj;
},
};
exports.TimeRange = {
fromJSON(object) {
return {
start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.start !== undefined) {
obj.start = message.start.toISOString();
}
if (message.end !== undefined) {
obj.end = message.end.toISOString();
}
return obj;
},
};
/** Decodes a base64 string into a fresh Uint8Array using Node's Buffer. */
function bytesFromBase64(b64) {
    const decoded = globalThis.Buffer.from(b64, "base64");
    return Uint8Array.from(decoded);
}
/** Encodes a byte array (Uint8Array or Buffer) as a base64 string. */
function base64FromBytes(arr) {
    const buf = globalThis.Buffer.from(arr);
    return buf.toString("base64");
}
/**
 * Converts a protobuf Timestamp ({seconds, nanos}) into a JS Date.
 * `seconds` may be a string or number; sub-millisecond precision in
 * `nanos` is lost to Date's millisecond resolution.
 */
function fromTimestamp(t) {
    const seconds = globalThis.Number(t.seconds) || 0;
    const nanos = t.nanos || 0;
    return new globalThis.Date(seconds * 1_000 + nanos / 1_000_000);
}
/**
 * Normalizes a JSON timestamp to a JS Date. Accepts a Date (returned
 * as-is), an RFC 3339 date string, or a protobuf Timestamp JSON object.
 */
function fromJsonTimestamp(o) {
    if (o instanceof globalThis.Date) {
        return o;
    }
    if (typeof o === "string") {
        return new globalThis.Date(o);
    }
    return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
}
/** True when `value` is neither null nor undefined. */
function isSet(value) {
    // Loose equality against null matches exactly null and undefined.
    return value != null;
}

View File

@@ -0,0 +1,144 @@
import { LogId } from "./sigstore_common";
/**
 * KindVersion contains the entry's kind and api version.
 * Both fields are plain strings; the generated converters perform no
 * validation beyond string coercion.
 */
export interface KindVersion {
    /**
     * Kind is the type of entry being stored in the log.
     * See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types
     */
    kind: string;
    /** The specific api version of the type. */
    version: string;
}
/**
 * The checkpoint MUST contain an origin string as a unique log identifier,
 * the tree size, and the root hash. It MAY also be followed by optional data,
 * and clients MUST NOT assume optional data. The checkpoint MUST also contain
 * a signature over the root hash (tree head). The checkpoint MAY contain additional
 * signatures, but the first SHOULD be the signature from the log. Checkpoint contents
 * are concatenated with newlines into a single string.
 * The checkpoint format is described in
 * https://github.com/transparency-dev/formats/blob/main/log/README.md
 * and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md.
 * An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go
 */
export interface Checkpoint {
    /**
     * The full checkpoint text: the newline-concatenated contents (origin,
     * tree size, root hash, any optional data) plus signature line(s), per
     * the format described above.
     */
    envelope: string;
}
/**
 * InclusionProof is the proof returned from the transparency log. Can
 * be used for offline or online verification against the log.
 */
export interface InclusionProof {
    /**
     * The index of the entry in the tree it was written to.
     * Carried as a decimal string in JSON (the generated converter
     * defaults it to "0").
     */
    logIndex: string;
    /**
     * The hash digest stored at the root of the merkle tree at the time
     * the proof was generated.
     */
    rootHash: Buffer;
    /**
     * The size of the merkle tree at the time the proof was generated.
     * Carried as a decimal string in JSON, like log_index.
     */
    treeSize: string;
    /**
     * A list of hashes required to compute the inclusion proof, sorted
     * in order from leaf to root.
     * Note that leaf and root hashes are not included.
     * The root hash is available separately in this message, and the
     * leaf hash should be calculated by the client.
     */
    hashes: Buffer[];
    /**
     * Signature of the tree head, as of the time of this proof was
     * generated. See above info on 'Checkpoint' for more details.
     */
    checkpoint: Checkpoint | undefined;
}
/**
 * The inclusion promise is calculated by Rekor. It's calculated as a
 * signature over a canonical JSON serialization of the persisted entry, the
 * log ID, log index and the integration timestamp.
 * See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54
 * The format of the signature depends on the transparency log's public key.
 * If the signature algorithm requires a hash function and/or a signature
 * scheme (e.g. RSA) those has to be retrieved out-of-band from the log's
 * operators, together with the public key.
 * This is used to verify the integration timestamp's value and that the log
 * has promised to include the entry.
 */
export interface InclusionPromise {
    /** The raw signature bytes produced by the log, as described above. */
    signedEntryTimestamp: Buffer;
}
/**
 * TransparencyLogEntry captures all the details required from Rekor to
 * reconstruct an entry, given that the payload is provided via other means.
 * This type can easily be created from the existing response from Rekor.
 * Future iterations could rely on Rekor returning the minimal set of
 * attributes (excluding the payload) that are required for verifying the
 * inclusion promise. The inclusion promise (called SignedEntryTimestamp in
 * the response from Rekor) is similar to a Signed Certificate Timestamp
 * as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2.
 */
export interface TransparencyLogEntry {
    /** The global index of the entry, used when querying the log by index. */
    logIndex: string;
    /** The unique identifier of the log. */
    logId: LogId | undefined;
    /**
     * The kind (type) and version of the object associated with this
     * entry. These values are required to construct the entry during
     * verification.
     */
    kindVersion: KindVersion | undefined;
    /**
     * The UNIX timestamp from the log when the entry was persisted.
     * The integration time MUST NOT be trusted if inclusion_promise
     * is omitted.
     */
    integratedTime: string;
    /**
     * The inclusion promise/signed entry timestamp from the log.
     * Required for v0.1 bundles, and MUST be verified.
     * Optional for >= v0.2 bundles if another suitable source of
     * time is present (such as another source of signed time,
     * or the current system time for long-lived certificates).
     * MUST be verified if no other suitable source of time is present,
     * and SHOULD be verified otherwise.
     */
    inclusionPromise: InclusionPromise | undefined;
    /**
     * The inclusion proof can be used for offline or online verification
     * that the entry was appended to the log, and that the log has not been
     * altered.
     */
    inclusionProof: InclusionProof | undefined;
    /**
     * Optional. The canonicalized transparency log entry, used to
     * reconstruct the Signed Entry Timestamp (SET) during verification.
     * The contents of this field are the same as the `body` field in
     * a Rekor response, meaning that it does **not** include the "full"
     * canonicalized form (of log index, ID, etc.) which are
     * exposed as separate fields. The verifier is responsible for
     * combining the `canonicalized_body`, `log_index`, `log_id`,
     * and `integrated_time` into the payload that the SET's signature
     * is generated over.
     * This field is intended to be used in cases where the SET cannot be
     * produced deterministically (e.g. inconsistent JSON field ordering,
     * differing whitespace, etc).
     *
     * If set, clients MUST verify that the signature referenced in the
     * `canonicalized_body` matches the signature provided in the
     * `Bundle.content`.
     * If not set, clients are responsible for constructing an equivalent
     * payload from other sources to verify the signature.
     */
    canonicalizedBody: Buffer;
}
/** JSON (de)serialization helpers generated for each message in this module. */
export declare const KindVersion: MessageFns<KindVersion>;
export declare const Checkpoint: MessageFns<Checkpoint>;
export declare const InclusionProof: MessageFns<InclusionProof>;
export declare const InclusionPromise: MessageFns<InclusionPromise>;
export declare const TransparencyLogEntry: MessageFns<TransparencyLogEntry>;
/** Minimal converter surface exposed for every generated protobuf message. */
interface MessageFns<T> {
    fromJSON(object: any): T;
    toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,137 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: sigstore_rekor.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
/* eslint-disable */
const sigstore_common_1 = require("./sigstore_common");
exports.KindVersion = {
fromJSON(object) {
return {
kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
version: isSet(object.version) ? globalThis.String(object.version) : "",
};
},
toJSON(message) {
const obj = {};
if (message.kind !== "") {
obj.kind = message.kind;
}
if (message.version !== "") {
obj.version = message.version;
}
return obj;
},
};
exports.Checkpoint = {
fromJSON(object) {
return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
},
toJSON(message) {
const obj = {};
if (message.envelope !== "") {
obj.envelope = message.envelope;
}
return obj;
},
};
exports.InclusionProof = {
fromJSON(object) {
return {
logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
hashes: globalThis.Array.isArray(object?.hashes)
? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
: [],
checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.logIndex !== "0") {
obj.logIndex = message.logIndex;
}
if (message.rootHash.length !== 0) {
obj.rootHash = base64FromBytes(message.rootHash);
}
if (message.treeSize !== "0") {
obj.treeSize = message.treeSize;
}
if (message.hashes?.length) {
obj.hashes = message.hashes.map((e) => base64FromBytes(e));
}
if (message.checkpoint !== undefined) {
obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
}
return obj;
},
};
exports.InclusionPromise = {
fromJSON(object) {
return {
signedEntryTimestamp: isSet(object.signedEntryTimestamp)
? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
: Buffer.alloc(0),
};
},
toJSON(message) {
const obj = {};
if (message.signedEntryTimestamp.length !== 0) {
obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
}
return obj;
},
};
exports.TransparencyLogEntry = {
fromJSON(object) {
return {
logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
canonicalizedBody: isSet(object.canonicalizedBody)
? Buffer.from(bytesFromBase64(object.canonicalizedBody))
: Buffer.alloc(0),
};
},
toJSON(message) {
const obj = {};
if (message.logIndex !== "0") {
obj.logIndex = message.logIndex;
}
if (message.logId !== undefined) {
obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
}
if (message.kindVersion !== undefined) {
obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
}
if (message.integratedTime !== "0") {
obj.integratedTime = message.integratedTime;
}
if (message.inclusionPromise !== undefined) {
obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
}
if (message.inclusionProof !== undefined) {
obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
}
if (message.canonicalizedBody.length !== 0) {
obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
}
return obj;
},
};
/** Decodes a base64 string into a fresh Uint8Array using Node's Buffer. */
function bytesFromBase64(b64) {
    const decoded = globalThis.Buffer.from(b64, "base64");
    return Uint8Array.from(decoded);
}
/** Encodes a byte array (Uint8Array or Buffer) as a base64 string. */
function base64FromBytes(arr) {
    const buf = globalThis.Buffer.from(arr);
    return buf.toString("base64");
}
/** True when `value` is neither null nor undefined. */
function isSet(value) {
    // Loose equality against null matches exactly null and undefined.
    return value != null;
}

View File

@@ -0,0 +1,383 @@
import { DistinguishedName, HashAlgorithm, LogId, PublicKey, TimeRange, X509CertificateChain } from "./sigstore_common";
/**
 * ServiceSelector specifies how a client SHOULD select a set of
 * Services to connect to. A client SHOULD throw an error if
 * the value is SERVICE_SELECTOR_UNDEFINED.
 */
export declare enum ServiceSelector {
    /** SERVICE_SELECTOR_UNDEFINED - Sentinel zero value; clients SHOULD throw an error when encountering it. */
    SERVICE_SELECTOR_UNDEFINED = 0,
    /**
     * ALL - Clients SHOULD select all Services based on supported API version
     * and validity window.
     */
    ALL = 1,
    /**
     * ANY - Clients SHOULD select one Service based on supported API version
     * and validity window. It is up to the client implementation to
     * decide how to select the Service, e.g. random or round-robin.
     */
    ANY = 2,
    /**
     * EXACT - Clients SHOULD select a specific number of Services based on
     * supported API version and validity window, using the provided
     * `count`. It is up to the client implementation to decide how to
     * select the Service, e.g. random or round-robin.
     */
    EXACT = 3
}
/** Decodes a ServiceSelector from its proto3 JSON form (numeric value or string name). */
export declare function serviceSelectorFromJSON(object: any): ServiceSelector;
/** Encodes a ServiceSelector as its canonical string name for proto3 JSON output. */
export declare function serviceSelectorToJSON(object: ServiceSelector): string;
/**
 * TransparencyLogInstance describes the immutable parameters from a
 * transparency log.
 * See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters
 * for more details.
 * The included parameters are the minimal set required to identify a log,
 * and verify an inclusion proof/promise.
 */
export interface TransparencyLogInstance {
    /**
     * The base URL of the log, which can be used to construct URLs for
     * the client. SHOULD match the origin on the log checkpoint:
     * https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md#note-text.
     */
    baseUrl: string;
    /** The hash algorithm used for the Merkle Tree. */
    hashAlgorithm: HashAlgorithm;
    /**
     * The public key used to verify signatures generated by the log.
     * This attribute contains the signature algorithm used by the log.
     */
    publicKey: PublicKey | undefined;
    /**
     * The unique identifier for this transparency log.
     * Represented as the SHA-256 hash of the log's public key,
     * calculated over the DER encoding of the key represented as
     * SubjectPublicKeyInfo.
     * See https://www.rfc-editor.org/rfc/rfc6962#section-3.2
     * MUST set checkpoint_key_id if multiple logs use the same
     * signing key.
     * Deprecated: Use checkpoint_key_id instead, since log_id is not
     * guaranteed to be unique across multiple deployments. Clients
     * must use the key name and key ID from a checkpoint to determine
     * the correct TransparencyLogInstance to verify a proof.
     *
     * @deprecated
     */
    logId: LogId | undefined;
    /**
     * The unique identifier for the log, used in the checkpoint.
     * Its calculation is described in
     * https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures
     * SHOULD be set for all logs. When not set, clients MUST use log_id.
     *
     * For Ed25519 signatures, the key ID is computed per the C2SP spec:
     * key ID = SHA-256(key name || 0x0A || 0x01 || 32-byte Ed25519 public key)[:4]
     * For ECDSA signatures, the key ID is computed per the C2SP spec:
     * key ID = SHA-256(PKIX ASN.1 DER-encoded public key, in SubjectPublicKeyInfo format)[:4]
     * For RSA signatures, the signature type will be 0xff with an appended identifier for the format,
     * "PKIX-RSA-PKCS#1v1.5":
     * key ID = SHA-256(key name || 0x0A || 0xff || PKIX-RSA-PKCS#1v1.5 || PKIX ASN.1 DER-encoded public key)[:4]
     *
     * This is provided for convenience. Clients can also calculate the
     * checkpoint key ID given the log's public key.
     * SHOULD be 4 bytes long, as a truncated hash.
     *
     * To find a matching TransparencyLogInstance in the TrustedRoot,
     * clients will parse the checkpoint, and for each signature line,
     * use the key name (i.e. log origin, base_url from TrustedRoot)
     * and checkpoint key ID (i.e. checkpoint_key_id from TrustedRoot)
     * which can then be compared against the TrustedRoot log instances.
     */
    checkpointKeyId: LogId | undefined;
    /**
     * The name of the operator of this log deployment. Operator MUST be
     * formatted as a scheme-less URI, e.g. sigstore.dev
     * This MUST be used when there are multiple transparency log instances
     * to determine if log proof verification meets a specified threshold,
     * e.g. two proofs from log deployments operated by the same operator
     * should count as only one valid proof.
     */
    operator: string;
}
/**
 * CertificateAuthority enlists the information required to identify which
 * CA to use and perform signature verification.
 */
export interface CertificateAuthority {
    /**
     * The root certificate MUST be self-signed, and so the subject and
     * issuer are the same.
     */
    subject: DistinguishedName | undefined;
    /**
     * The URI identifies the certificate authority.
     *
     * It is RECOMMENDED that the URI is the base URL for the certificate
     * authority, which can be passed to any SDK/client supplied by the
     * certificate authority in order to interact with it.
     */
    uri: string;
    /**
     * The certificate chain for this CA. The last certificate in the chain
     * MUST be the trust anchor. The trust anchor MAY be a self-signed root
     * CA certificate or MAY be an intermediate CA certificate.
     */
    certChain: X509CertificateChain | undefined;
    /**
     * The time the *entire* chain was valid. This is at max the
     * longest interval when *all* certificates in the chain were valid,
     * but it MAY be shorter. Clients MUST check timestamps against *both*
     * the `valid_for` time range *and* the entire certificate chain.
     *
     * The TimeRange should be considered valid *inclusive* of the
     * endpoints.
     */
    validFor: TimeRange | undefined;
    /**
     * The name of the operator of this certificate or timestamp authority.
     * Operator MUST be formatted as a scheme-less URI, e.g. sigstore.dev
     * This MUST be used when there are multiple timestamp authorities to
     * determine if the signed timestamp verification meets a specified
     * threshold, e.g. two signed timestamps from timestamp authorities
     * operated by the same operator should count as only one valid
     * timestamp.
     */
    operator: string;
}
/**
* TrustedRoot describes the client's complete set of trusted entities.
* How the TrustedRoot is populated is not specified, but can be a
* combination of many sources such as TUF repositories, files on disk etc.
*
* The TrustedRoot is not meant to be used for any artifact verification, only
* to capture the complete/global set of trusted verification materials.
* When verifying an artifact, based on the artifact and policies, a selection
* of keys/authorities are expected to be extracted and provided to the
* verification function. This way the set of keys/authorities can be kept to
* a minimal set by the policy to gain better control over what signatures
* that are allowed.
*
* The embedded transparency logs, CT logs, CAs and TSAs MUST include any
* previously used instance -- otherwise signatures made in the past cannot
* be verified.
*
* All the listed instances SHOULD be sorted by the 'valid_for' in ascending
* order, that is, the oldest instance first. Only the last instance is
* allowed to have their 'end' timestamp unset. All previous instances MUST
* have a closed interval of validity. The last instance MAY have a closed
 * interval. Clients MUST accept instances that overlap in time; if they
 * do not, clients may experience problems during rotations of
 * verification materials.
*
 * To be able to manage planned rotations of either transparency logs or
 * certificate authorities, clients MUST accept lists of instances where
 * the last instance has a 'valid_for' that belongs to the future.
* This should not be a problem as clients SHOULD first seek the trust root
* for a suitable instance before creating a per artifact trust root (that
* is, a sub-set of the complete trust root) that is used for verification.
*/
export interface TrustedRoot {
/**
* MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json
* when encoded as JSON.
* Clients MUST be able to process and parse content with the media
* type defined in the old format:
* application/vnd.dev.sigstore.trustedroot+json;version=0.1
*/
mediaType: string;
/** A set of trusted Rekor servers. */
tlogs: TransparencyLogInstance[];
/**
* A set of trusted certificate authorities (e.g Fulcio), and any
* intermediate certificates they provide.
     * If a CA is issuing multiple intermediate certificates, each
     * combination shall be represented as a separate chain. I.e., a single
* root cert may appear in multiple chains but with different
* intermediate and/or leaf certificates.
* The certificates are intended to be used for verifying artifact
* signatures.
*/
certificateAuthorities: CertificateAuthority[];
/** A set of trusted certificate transparency logs. */
ctlogs: TransparencyLogInstance[];
/** A set of trusted timestamping authorities. */
timestampAuthorities: CertificateAuthority[];
}
/**
* SigningConfig represents the trusted entities/state needed by Sigstore
* signing. In particular, it primarily contains service URLs that a Sigstore
* signer may need to connect to for the online aspects of signing.
*/
export interface SigningConfig {
/**
* MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json
* Clients MAY choose to also support
* application/vnd.dev.sigstore.signingconfig.v0.1+json
*/
mediaType: string;
/**
* URLs to Fulcio-compatible CAs, capable of receiving
* Certificate Signing Requests (CSRs) and responding with
* issued certificates.
*
* These URLs MUST be the "base" URL for the CAs, which clients
* should construct an appropriate CSR endpoint on top of.
* For example, if a CA URL is `https://example.com/ca`, then
* the client MAY construct the CSR endpoint as
* `https://example.com/ca/api/v2/signingCert`.
*
* Clients MUST select only one Service with the highest API version
* that the client is compatible with, that is within its
* validity period, and has the newest validity start date.
* Client SHOULD select the first Service that meets this requirement.
* All listed Services SHOULD be sorted by the `valid_for` window in
* descending order, with the newest instance first.
*/
caUrls: Service[];
/**
* URLs to OpenID Connect identity providers.
*
* These URLs MUST be the "base" URLs for the OIDC IdPs, which clients
* should perform well-known OpenID Connect discovery against.
*
* Clients MUST select only one Service with the highest API version
* that the client is compatible with, that is within its
* validity period, and has the newest validity start date.
* Client SHOULD select the first Service that meets this requirement.
* All listed Services SHOULD be sorted by the `valid_for` window in
* descending order, with the newest instance first.
*/
oidcUrls: Service[];
/**
* URLs to Rekor transparency logs.
*
* These URL MUST be the "base" URLs for the transparency logs,
* which clients should construct appropriate API endpoints on top of.
*
* Clients MUST group Services by `operator` and select at most one
* Service from each operator. Clients MUST select Services with the
* highest API version that the client is compatible with, that are
* within its validity period, and have the newest validity start dates.
* All listed Services SHOULD be sorted by the `valid_for` window in
* descending order, with the newest instance first.
*
* Clients MUST select Services based on the selector value of
* `rekor_tlog_config`.
*/
rekorTlogUrls: Service[];
/**
* Specifies how a client should select the set of Rekor transparency
* logs to write to.
*/
rekorTlogConfig: ServiceConfiguration | undefined;
/**
* URLs to RFC 3161 Time Stamping Authorities (TSA).
*
* These URLs MUST be the *full* URL for the TSA, meaning that it
* should be suitable for submitting Time Stamp Requests (TSRs) to
* via HTTP, per RFC 3161.
*
* Clients MUST group Services by `operator` and select at most one
* Service from each operator. Clients MUST select Services with the
* highest API version that the client is compatible with, that are
* within its validity period, and have the newest validity start dates.
* All listed Services SHOULD be sorted by the `valid_for` window in
* descending order, with the newest instance first.
*
* Clients MUST select Services based on the selector value of
* `tsa_config`.
*/
tsaUrls: Service[];
/**
* Specifies how a client should select the set of TSAs to request
* signed timestamps from.
*/
tsaConfig: ServiceConfiguration | undefined;
}
/**
* Service represents an instance of a service that is a part of Sigstore infrastructure.
* When selecting one or multiple services from a list of services, clients MUST:
* * Use the API version hint to determine the service with the highest API version
* that the client is compatible with.
* * Only select services within the specified validity period and that have the
* newest validity start date.
* When selecting multiple services, clients MUST:
* * Use the ServiceConfiguration to determine how many services MUST be selected.
* Clients MUST return an error if there are not enough services that meet the
* selection criteria.
* * Group services by `operator` and select at most one service from an operator.
* During verification, clients MUST treat valid verification metadata from the
* operator as valid only once towards a threshold.
* * Select services from only the highest supported API version.
*/
export interface Service {
/** URL of the service. MUST include scheme and authority. MAY include path. */
url: string;
/**
* Specifies the major API version. A value of 0 represents a service that
* has not yet been released.
*/
majorApiVersion: number;
/**
* Validity period of a service. A service that has only a start date
* SHOULD be considered the most recent instance of that service, but
* the client MUST NOT assume there is only one valid instance.
* The TimeRange MUST be considered valid *inclusive* of the
* endpoints.
*/
validFor: TimeRange | undefined;
/**
* Specifies the name of the service operator. When selecting multiple
* services, clients MUST use the operator to select services from
* distinct operators. Operator MUST be formatted as a scheme-less
* URI, e.g. sigstore.dev
*/
operator: string;
}
/**
* ServiceConfiguration specifies how a client should select a set of
* Services to connect to, along with a count when a specific number
* of Services is requested.
*/
export interface ServiceConfiguration {
/**
* How a client should select a set of Services to connect to.
* Clients SHOULD NOT select services from multiple API versions.
*/
selector: ServiceSelector;
/**
* count specifies the number of Services the client should use.
* Only used when selector is set to EXACT, and count MUST be greater
* than 0. count MUST be less than or equal to the number of Services.
* Clients MUST return an error is there are not enough services
* that meet selection criteria.
*/
count: number;
}
/**
* ClientTrustConfig describes the complete state needed by a client
* to perform both signing and verification operations against a particular
* instance of Sigstore.
*/
export interface ClientTrustConfig {
/** MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json */
mediaType: string;
/** The root of trust, which MUST be present. */
trustedRoot: TrustedRoot | undefined;
/** Configuration for signing clients, which MUST be present. */
signingConfig: SigningConfig | undefined;
}
export declare const TransparencyLogInstance: MessageFns<TransparencyLogInstance>;
export declare const CertificateAuthority: MessageFns<CertificateAuthority>;
export declare const TrustedRoot: MessageFns<TrustedRoot>;
export declare const SigningConfig: MessageFns<SigningConfig>;
export declare const Service: MessageFns<Service>;
export declare const ServiceConfiguration: MessageFns<ServiceConfiguration>;
export declare const ClientTrustConfig: MessageFns<ClientTrustConfig>;
interface MessageFns<T> {
fromJSON(object: any): T;
toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,284 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: sigstore_trustroot.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
exports.serviceSelectorToJSON = serviceSelectorToJSON;
/* eslint-disable */
const sigstore_common_1 = require("./sigstore_common");
/**
* ServiceSelector specifies how a client SHOULD select a set of
* Services to connect to. A client SHOULD throw an error if
* the value is SERVICE_SELECTOR_UNDEFINED.
*/
var ServiceSelector;
(function (ServiceSelector) {
ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
/**
* ALL - Clients SHOULD select all Services based on supported API version
* and validity window.
*/
ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
/**
* ANY - Clients SHOULD select one Service based on supported API version
* and validity window. It is up to the client implementation to
* decide how to select the Service, e.g. random or round-robin.
*/
ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
/**
* EXACT - Clients SHOULD select a specific number of Services based on
* supported API version and validity window, using the provided
* `count`. It is up to the client implementation to decide how to
* select the Service, e.g. random or round-robin.
*/
ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
function serviceSelectorFromJSON(object) {
switch (object) {
case 0:
case "SERVICE_SELECTOR_UNDEFINED":
return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
case 1:
case "ALL":
return ServiceSelector.ALL;
case 2:
case "ANY":
return ServiceSelector.ANY;
case 3:
case "EXACT":
return ServiceSelector.EXACT;
default:
throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
}
}
function serviceSelectorToJSON(object) {
switch (object) {
case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
return "SERVICE_SELECTOR_UNDEFINED";
case ServiceSelector.ALL:
return "ALL";
case ServiceSelector.ANY:
return "ANY";
case ServiceSelector.EXACT:
return "EXACT";
default:
throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
}
}
exports.TransparencyLogInstance = {
fromJSON(object) {
return {
baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
};
},
toJSON(message) {
const obj = {};
if (message.baseUrl !== "") {
obj.baseUrl = message.baseUrl;
}
if (message.hashAlgorithm !== 0) {
obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
}
if (message.publicKey !== undefined) {
obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
}
if (message.logId !== undefined) {
obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
}
if (message.checkpointKeyId !== undefined) {
obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
}
if (message.operator !== "") {
obj.operator = message.operator;
}
return obj;
},
};
exports.CertificateAuthority = {
fromJSON(object) {
return {
subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
};
},
toJSON(message) {
const obj = {};
if (message.subject !== undefined) {
obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
}
if (message.uri !== "") {
obj.uri = message.uri;
}
if (message.certChain !== undefined) {
obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
}
if (message.validFor !== undefined) {
obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
}
if (message.operator !== "") {
obj.operator = message.operator;
}
return obj;
},
};
exports.TrustedRoot = {
fromJSON(object) {
return {
mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
tlogs: globalThis.Array.isArray(object?.tlogs)
? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
: [],
certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
: [],
ctlogs: globalThis.Array.isArray(object?.ctlogs)
? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
: [],
timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
if (message.mediaType !== "") {
obj.mediaType = message.mediaType;
}
if (message.tlogs?.length) {
obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
}
if (message.certificateAuthorities?.length) {
obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
}
if (message.ctlogs?.length) {
obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
}
if (message.timestampAuthorities?.length) {
obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
}
return obj;
},
};
exports.SigningConfig = {
fromJSON(object) {
return {
mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
: [],
rekorTlogConfig: isSet(object.rekorTlogConfig)
? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
: undefined,
tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.mediaType !== "") {
obj.mediaType = message.mediaType;
}
if (message.caUrls?.length) {
obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
}
if (message.oidcUrls?.length) {
obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
}
if (message.rekorTlogUrls?.length) {
obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
}
if (message.rekorTlogConfig !== undefined) {
obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
}
if (message.tsaUrls?.length) {
obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
}
if (message.tsaConfig !== undefined) {
obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
}
return obj;
},
};
exports.Service = {
fromJSON(object) {
return {
url: isSet(object.url) ? globalThis.String(object.url) : "",
majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
};
},
toJSON(message) {
const obj = {};
if (message.url !== "") {
obj.url = message.url;
}
if (message.majorApiVersion !== 0) {
obj.majorApiVersion = Math.round(message.majorApiVersion);
}
if (message.validFor !== undefined) {
obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
}
if (message.operator !== "") {
obj.operator = message.operator;
}
return obj;
},
};
exports.ServiceConfiguration = {
fromJSON(object) {
return {
selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
count: isSet(object.count) ? globalThis.Number(object.count) : 0,
};
},
toJSON(message) {
const obj = {};
if (message.selector !== 0) {
obj.selector = serviceSelectorToJSON(message.selector);
}
if (message.count !== 0) {
obj.count = Math.round(message.count);
}
return obj;
},
};
exports.ClientTrustConfig = {
fromJSON(object) {
return {
mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.mediaType !== "") {
obj.mediaType = message.mediaType;
}
if (message.trustedRoot !== undefined) {
obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
}
if (message.signingConfig !== undefined) {
obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
}
return obj;
},
};
function isSet(value) {
return value !== null && value !== undefined;
}

View File

@@ -0,0 +1,191 @@
import { Bundle } from "./sigstore_bundle";
import { HashOutput, ObjectIdentifierValuePair, PublicKey, SubjectAlternativeName } from "./sigstore_common";
import { TrustedRoot } from "./sigstore_trustroot";
/** The identity of a X.509 Certificate signer. */
export interface CertificateIdentity {
/** The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1) */
issuer: string;
san: SubjectAlternativeName | undefined;
/**
* An unordered list of OIDs that must be verified.
* All OID/values provided in this list MUST exactly match against
* the values in the certificate for verification to be successful.
*/
oids: ObjectIdentifierValuePair[];
}
export interface CertificateIdentities {
identities: CertificateIdentity[];
}
export interface PublicKeyIdentities {
publicKeys: PublicKey[];
}
/**
* A light-weight set of options/policies for identifying trusted signers,
* used during verification of a single artifact.
*/
export interface ArtifactVerificationOptions {
/**
* At least one identity MUST be provided. Providing zero identities
* is an error. If at least one provided identity is found as a
* signer, the verification is considered successful.
*/
signers?: {
$case: "certificateIdentities";
certificateIdentities: CertificateIdentities;
} | //
/**
* To simplify verification implementation, the logic for
* bundle verification should be implemented as a
* higher-order function, where one of argument should be an
* interface over the set of trusted public keys, like this:
* `Verify(bytes artifact, bytes signature, string key_id)`.
* This way the caller is in full control of mapping the
* identified (or hinted) key in the bundle to one of the
* trusted keys, as this process is inherently application
* specific.
*/
{
$case: "publicKeys";
publicKeys: PublicKeyIdentities;
} | undefined;
/**
* Optional options for artifact transparency log verification.
* If none is provided, the default verification options are:
* Threshold: 1
* Online verification: false
* Disable: false
*/
tlogOptions?: ArtifactVerificationOptions_TlogOptions | undefined;
/**
* Optional options for certificate transparency log verification.
* If none is provided, the default verification options are:
* Threshold: 1
* Disable: false
*/
ctlogOptions?: ArtifactVerificationOptions_CtlogOptions | undefined;
/**
* Optional options for certificate signed timestamp verification.
* If none is provided, the default verification options are:
* Threshold: 0
* Disable: true
*/
tsaOptions?: ArtifactVerificationOptions_TimestampAuthorityOptions | undefined;
/**
* Optional options for integrated timestamp verification.
* If none is provided, the default verification options are:
* Threshold: 0
* Disable: true
*/
integratedTsOptions?: ArtifactVerificationOptions_TlogIntegratedTimestampOptions | undefined;
/**
* Optional options for observed timestamp verification.
* If none is provided, the default verification options are:
* Threshold 1
* Disable: false
*/
observerOptions?: ArtifactVerificationOptions_ObserverTimestampOptions | undefined;
}
export interface ArtifactVerificationOptions_TlogOptions {
/** Number of transparency logs the entry must appear on. */
threshold: number;
/** Perform an online inclusion proof. */
performOnlineVerification: boolean;
/** Disable verification for transparency logs. */
disable: boolean;
}
export interface ArtifactVerificationOptions_CtlogOptions {
/**
* The number of ct transparency logs the certificate must
* appear on.
*/
threshold: number;
/** Disable ct transparency log verification */
disable: boolean;
}
export interface ArtifactVerificationOptions_TimestampAuthorityOptions {
/** The number of signed timestamps that are expected. */
threshold: number;
/** Disable signed timestamp verification. */
disable: boolean;
}
export interface ArtifactVerificationOptions_TlogIntegratedTimestampOptions {
/** The number of integrated timestamps that are expected. */
threshold: number;
/** Disable integrated timestamp verification. */
disable: boolean;
}
export interface ArtifactVerificationOptions_ObserverTimestampOptions {
/**
* The number of external observers of the timestamp.
* This is a union of RFC3161 signed timestamps, and
* integrated timestamps from a transparency log, that
* could include additional timestamp sources in the
* future.
*/
threshold: number;
/** Disable observer timestamp verification. */
disable: boolean;
}
export interface Artifact {
data?: //
/** Location of the artifact */
{
$case: "artifactUri";
artifactUri: string;
} | //
/** The raw bytes of the artifact */
{
$case: "artifact";
artifact: Buffer;
} | //
/**
* Digest of the artifact. SHOULD NOT be used when verifying an
* in-toto attestation as the subject digest cannot be
* reconstructed. This option will not work with Ed25519
* signatures, use Ed25519Ph or another algorithm instead.
*/
{
$case: "artifactDigest";
artifactDigest: HashOutput;
} | undefined;
}
/**
* Input captures all that is needed to call the bundle verification method,
* to verify a single artifact referenced by the bundle.
*/
export interface Input {
/**
* The verification materials provided during a bundle verification.
* The running process is usually preloaded with a "global"
* dev.sisgtore.trustroot.TrustedRoot.v1 instance. Prior to
* verifying an artifact (i.e a bundle), and/or based on current
* policy, some selection is expected to happen, to filter out the
* exact certificate authority to use, which transparency logs are
* relevant etc. The result should b ecaptured in the
* `artifact_trust_root`.
*/
artifactTrustRoot: TrustedRoot | undefined;
artifactVerificationOptions: ArtifactVerificationOptions | undefined;
bundle: Bundle | undefined;
/**
* If the bundle contains a message signature, the artifact must be
* provided.
*/
artifact?: Artifact | undefined;
}
export declare const CertificateIdentity: MessageFns<CertificateIdentity>;
export declare const CertificateIdentities: MessageFns<CertificateIdentities>;
export declare const PublicKeyIdentities: MessageFns<PublicKeyIdentities>;
export declare const ArtifactVerificationOptions: MessageFns<ArtifactVerificationOptions>;
export declare const ArtifactVerificationOptions_TlogOptions: MessageFns<ArtifactVerificationOptions_TlogOptions>;
export declare const ArtifactVerificationOptions_CtlogOptions: MessageFns<ArtifactVerificationOptions_CtlogOptions>;
export declare const ArtifactVerificationOptions_TimestampAuthorityOptions: MessageFns<ArtifactVerificationOptions_TimestampAuthorityOptions>;
export declare const ArtifactVerificationOptions_TlogIntegratedTimestampOptions: MessageFns<ArtifactVerificationOptions_TlogIntegratedTimestampOptions>;
export declare const ArtifactVerificationOptions_ObserverTimestampOptions: MessageFns<ArtifactVerificationOptions_ObserverTimestampOptions>;
export declare const Artifact: MessageFns<Artifact>;
export declare const Input: MessageFns<Input>;
interface MessageFns<T> {
fromJSON(object: any): T;
toJSON(message: T): unknown;
}
export {};

View File

@@ -0,0 +1,281 @@
"use strict";
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.0
// protoc v6.30.2
// source: sigstore_verification.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
/* eslint-disable */
const sigstore_bundle_1 = require("./sigstore_bundle");
const sigstore_common_1 = require("./sigstore_common");
const sigstore_trustroot_1 = require("./sigstore_trustroot");
exports.CertificateIdentity = {
fromJSON(object) {
return {
issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
oids: globalThis.Array.isArray(object?.oids)
? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
if (message.issuer !== "") {
obj.issuer = message.issuer;
}
if (message.san !== undefined) {
obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
}
if (message.oids?.length) {
obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
}
return obj;
},
};
exports.CertificateIdentities = {
fromJSON(object) {
return {
identities: globalThis.Array.isArray(object?.identities)
? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
if (message.identities?.length) {
obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
}
return obj;
},
};
exports.PublicKeyIdentities = {
fromJSON(object) {
return {
publicKeys: globalThis.Array.isArray(object?.publicKeys)
? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
if (message.publicKeys?.length) {
obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
}
return obj;
},
};
exports.ArtifactVerificationOptions = {
fromJSON(object) {
return {
signers: isSet(object.certificateIdentities)
? {
$case: "certificateIdentities",
certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
}
: isSet(object.publicKeys)
? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
: undefined,
tlogOptions: isSet(object.tlogOptions)
? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
: undefined,
ctlogOptions: isSet(object.ctlogOptions)
? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
: undefined,
tsaOptions: isSet(object.tsaOptions)
? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
: undefined,
integratedTsOptions: isSet(object.integratedTsOptions)
? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
: undefined,
observerOptions: isSet(object.observerOptions)
? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
: undefined,
};
},
toJSON(message) {
const obj = {};
if (message.signers?.$case === "certificateIdentities") {
obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
}
else if (message.signers?.$case === "publicKeys") {
obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
}
if (message.tlogOptions !== undefined) {
obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
}
if (message.ctlogOptions !== undefined) {
obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
}
if (message.tsaOptions !== undefined) {
obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
}
if (message.integratedTsOptions !== undefined) {
obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
}
if (message.observerOptions !== undefined) {
obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
}
return obj;
},
};
exports.ArtifactVerificationOptions_TlogOptions = {
fromJSON(object) {
return {
threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
performOnlineVerification: isSet(object.performOnlineVerification)
? globalThis.Boolean(object.performOnlineVerification)
: false,
disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
if (message.threshold !== 0) {
obj.threshold = Math.round(message.threshold);
}
if (message.performOnlineVerification !== false) {
obj.performOnlineVerification = message.performOnlineVerification;
}
if (message.disable !== false) {
obj.disable = message.disable;
}
return obj;
},
};
exports.ArtifactVerificationOptions_CtlogOptions = {
fromJSON(object) {
return {
threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
if (message.threshold !== 0) {
obj.threshold = Math.round(message.threshold);
}
if (message.disable !== false) {
obj.disable = message.disable;
}
return obj;
},
};
exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
fromJSON(object) {
return {
threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
if (message.threshold !== 0) {
obj.threshold = Math.round(message.threshold);
}
if (message.disable !== false) {
obj.disable = message.disable;
}
return obj;
},
};
exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
fromJSON(object) {
return {
threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
if (message.threshold !== 0) {
obj.threshold = Math.round(message.threshold);
}
if (message.disable !== false) {
obj.disable = message.disable;
}
return obj;
},
};
exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
fromJSON(object) {
return {
threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
if (message.threshold !== 0) {
obj.threshold = Math.round(message.threshold);
}
if (message.disable !== false) {
obj.disable = message.disable;
}
return obj;
},
};
exports.Artifact = {
fromJSON(object) {
return {
data: isSet(object.artifactUri)
? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
: isSet(object.artifact)
? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
: isSet(object.artifactDigest)
? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
: undefined,
};
},
toJSON(message) {
const obj = {};
if (message.data?.$case === "artifactUri") {
obj.artifactUri = message.data.artifactUri;
}
else if (message.data?.$case === "artifact") {
obj.artifact = base64FromBytes(message.data.artifact);
}
else if (message.data?.$case === "artifactDigest") {
obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
}
return obj;
},
};
exports.Input = {
fromJSON(object) {
return {
artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
artifactVerificationOptions: isSet(object.artifactVerificationOptions)
? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
: undefined,
bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
};
},
toJSON(message) {
const obj = {};
if (message.artifactTrustRoot !== undefined) {
obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
}
if (message.artifactVerificationOptions !== undefined) {
obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
}
if (message.bundle !== undefined) {
obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
}
if (message.artifact !== undefined) {
obj.artifact = exports.Artifact.toJSON(message.artifact);
}
return obj;
},
};
/** Decode a base64 string into a fresh Uint8Array (a copy, not a Buffer view). */
function bytesFromBase64(b64) {
    const decoded = globalThis.Buffer.from(b64, "base64");
    return Uint8Array.from(decoded);
}
/** Encode arbitrary bytes as a base64 string via Node's Buffer. */
function base64FromBytes(arr) {
    const buf = globalThis.Buffer.from(arr);
    return buf.toString("base64");
}
/** True when the value is neither null nor undefined (loose == catches both). */
function isSet(value) {
    return value != null;
}