Skip to content

Commit

Permalink
Built new protos from af7dd2f
Browse files Browse the repository at this point in the history
  • Loading branch information
github-actions[bot] committed Dec 20, 2024
1 parent af7dd2f commit 6098fb7
Show file tree
Hide file tree
Showing 6 changed files with 278 additions and 36 deletions.
101 changes: 67 additions & 34 deletions app/mlinference/v1/ml_inference.pb.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions gen/js/app/mlinference/v1/ml_inference_grpc_web_pb.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ grpc.web = require('grpc-web');


var app_data_v1_data_pb = require('../../../app/data/v1/data_pb.js')

var service_mlmodel_v1_mlmodel_pb = require('../../../service/mlmodel/v1/mlmodel_pb.js')
const proto = {};
proto.viam = {};
proto.viam.app = {};
Expand Down
13 changes: 13 additions & 0 deletions gen/js/app/mlinference/v1/ml_inference_pb.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

import * as jspb from "google-protobuf";
import * as app_data_v1_data_pb from "../../../app/data/v1/data_pb";
import * as service_mlmodel_v1_mlmodel_pb from "../../../service/mlmodel/v1/mlmodel_pb";

export class GetInferenceRequest extends jspb.Message {
getRegistryItemId(): string;
Expand Down Expand Up @@ -39,6 +40,16 @@ export namespace GetInferenceRequest {
}

export class GetInferenceResponse extends jspb.Message {
hasOutputTensors(): boolean;
clearOutputTensors(): void;
getOutputTensors(): service_mlmodel_v1_mlmodel_pb.FlatTensors | undefined;
setOutputTensors(value?: service_mlmodel_v1_mlmodel_pb.FlatTensors): void;

hasAnnotations(): boolean;
clearAnnotations(): void;
getAnnotations(): app_data_v1_data_pb.Annotations | undefined;
setAnnotations(value?: app_data_v1_data_pb.Annotations): void;

serializeBinary(): Uint8Array;
toObject(includeInstance?: boolean): GetInferenceResponse.AsObject;
static toObject(includeInstance: boolean, msg: GetInferenceResponse): GetInferenceResponse.AsObject;
Expand All @@ -51,6 +62,8 @@ export class GetInferenceResponse extends jspb.Message {

export namespace GetInferenceResponse {
export type AsObject = {
outputTensors?: service_mlmodel_v1_mlmodel_pb.FlatTensors.AsObject,
annotations?: app_data_v1_data_pb.Annotations.AsObject,
}
}

105 changes: 104 additions & 1 deletion gen/js/app/mlinference/v1/ml_inference_pb.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@ var global = (function() { return this || window || global || self || Function('

var app_data_v1_data_pb = require('../../../app/data/v1/data_pb.js');
goog.object.extend(proto, app_data_v1_data_pb);
var service_mlmodel_v1_mlmodel_pb = require('../../../service/mlmodel/v1/mlmodel_pb.js');
goog.object.extend(proto, service_mlmodel_v1_mlmodel_pb);
goog.exportSymbol('proto.viam.app.mlinference.v1.GetInferenceRequest', null, global);
goog.exportSymbol('proto.viam.app.mlinference.v1.GetInferenceResponse', null, global);
/**
Expand Down Expand Up @@ -334,7 +336,8 @@ proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.toObject = function
*/
proto.viam.app.mlinference.v1.GetInferenceResponse.toObject = function(includeInstance, msg) {
var f, obj = {

outputTensors: (f = msg.getOutputTensors()) && service_mlmodel_v1_mlmodel_pb.FlatTensors.toObject(includeInstance, f),
annotations: (f = msg.getAnnotations()) && app_data_v1_data_pb.Annotations.toObject(includeInstance, f)
};

if (includeInstance) {
Expand Down Expand Up @@ -371,6 +374,16 @@ proto.viam.app.mlinference.v1.GetInferenceResponse.deserializeBinaryFromReader =
}
var field = reader.getFieldNumber();
switch (field) {
case 1:
var value = new service_mlmodel_v1_mlmodel_pb.FlatTensors;
reader.readMessage(value,service_mlmodel_v1_mlmodel_pb.FlatTensors.deserializeBinaryFromReader);
msg.setOutputTensors(value);
break;
case 2:
var value = new app_data_v1_data_pb.Annotations;
reader.readMessage(value,app_data_v1_data_pb.Annotations.deserializeBinaryFromReader);
msg.setAnnotations(value);
break;
default:
reader.skipField();
break;
Expand Down Expand Up @@ -400,6 +413,96 @@ proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.serializeBinary = f
*/
proto.viam.app.mlinference.v1.GetInferenceResponse.serializeBinaryToWriter = function(message, writer) {
  // Field 1: output_tensors (viam.service.mlmodel.v1.FlatTensors),
  // emitted only when the wrapper field is set.
  var outputTensors = message.getOutputTensors();
  if (outputTensors != null) {
    writer.writeMessage(
        1,
        outputTensors,
        service_mlmodel_v1_mlmodel_pb.FlatTensors.serializeBinaryToWriter);
  }
  // Field 2: annotations (viam.app.data.v1.Annotations),
  // emitted only when the wrapper field is set.
  var annotations = message.getAnnotations();
  if (annotations != null) {
    writer.writeMessage(
        2,
        annotations,
        app_data_v1_data_pb.Annotations.serializeBinaryToWriter);
  }
};


/**
 * optional viam.service.mlmodel.v1.FlatTensors output_tensors = 1;
 * Reads the FlatTensors sub-message stored in field 1, if any.
 * @return {?proto.viam.service.mlmodel.v1.FlatTensors}
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.getOutputTensors = function() {
  var wrapped = jspb.Message.getWrapperField(
      this, service_mlmodel_v1_mlmodel_pb.FlatTensors, 1);
  return /** @type{?proto.viam.service.mlmodel.v1.FlatTensors} */ (wrapped);
};


/**
 * Stores a FlatTensors sub-message in field 1; pass undefined to unset.
 * @param {?proto.viam.service.mlmodel.v1.FlatTensors|undefined} value
 * @return {!proto.viam.app.mlinference.v1.GetInferenceResponse} returns this
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.setOutputTensors = function(value) {
  var updated = jspb.Message.setWrapperField(this, 1, value);
  return updated;
};


/**
 * Clears the message field making it undefined.
 * Delegates to the setter with undefined so hasOutputTensors() becomes false.
 * @return {!proto.viam.app.mlinference.v1.GetInferenceResponse} returns this
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.clearOutputTensors = function() {
  var cleared = this.setOutputTensors(undefined);
  return cleared;
};


/**
 * Returns whether this field is set.
 * @return {boolean}
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.hasOutputTensors = function() {
  // Loose != null intentionally matches both null and undefined.
  var raw = jspb.Message.getField(this, 1);
  return raw != null;
};


/**
 * optional viam.app.data.v1.Annotations annotations = 2;
 * Reads the Annotations sub-message stored in field 2, if any.
 * @return {?proto.viam.app.data.v1.Annotations}
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.getAnnotations = function() {
  var wrapped = jspb.Message.getWrapperField(
      this, app_data_v1_data_pb.Annotations, 2);
  return /** @type{?proto.viam.app.data.v1.Annotations} */ (wrapped);
};


/**
 * Stores an Annotations sub-message in field 2; pass undefined to unset.
 * @param {?proto.viam.app.data.v1.Annotations|undefined} value
 * @return {!proto.viam.app.mlinference.v1.GetInferenceResponse} returns this
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.setAnnotations = function(value) {
  var updated = jspb.Message.setWrapperField(this, 2, value);
  return updated;
};


/**
 * Clears the message field making it undefined.
 * Delegates to the setter with undefined so hasAnnotations() becomes false.
 * @return {!proto.viam.app.mlinference.v1.GetInferenceResponse} returns this
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.clearAnnotations = function() {
  var cleared = this.setAnnotations(undefined);
  return cleared;
};


/**
 * Returns whether this field is set.
 * @return {boolean}
 */
proto.viam.app.mlinference.v1.GetInferenceResponse.prototype.hasAnnotations = function() {
  // Loose != null intentionally matches both null and undefined.
  var raw = jspb.Message.getField(this, 2);
  return raw != null;
};


Expand Down
10 changes: 10 additions & 0 deletions gen/js/google/rpc/error_details_pb.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,14 @@ export namespace BadRequest {
getDescription(): string;
setDescription(value: string): void;

getReason(): string;
setReason(value: string): void;

hasLocalizedMessage(): boolean;
clearLocalizedMessage(): void;
getLocalizedMessage(): LocalizedMessage | undefined;
setLocalizedMessage(value?: LocalizedMessage): void;

serializeBinary(): Uint8Array;
toObject(includeInstance?: boolean): FieldViolation.AsObject;
static toObject(includeInstance: boolean, msg: FieldViolation): FieldViolation.AsObject;
Expand All @@ -217,6 +225,8 @@ export namespace BadRequest {
export type AsObject = {
field: string,
description: string,
reason: string,
localizedMessage?: LocalizedMessage.AsObject,
}
}
}
Expand Down
Loading

0 comments on commit 6098fb7

Please sign in to comment.