Mirror of https://github.com/ceph/ceph-csi.git, synced 2025-06-14 18:53:35 +00:00

Commit: vendor update for CSI 0.3.0

Files changed in this commit:
1090  vendor/google.golang.org/genproto/googleapis/genomics/v1/annotations.pb.go  (generated, vendored)
      File diff suppressed because it is too large
54  vendor/google.golang.org/genproto/googleapis/genomics/v1/cigar.pb.go  (generated, vendored)
@@ -1,7 +1,7 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // source: google/genomics/v1/cigar.proto
 
-package genomics
+package genomics // import "google.golang.org/genproto/googleapis/genomics/v1"
 
 import proto "github.com/golang/protobuf/proto"
 import fmt "fmt"
@@ -13,6 +13,12 @@ var _ = proto.Marshal
 var _ = fmt.Errorf
 var _ = math.Inf
 
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
 // Describes the different types of CIGAR alignment operations that exist.
 // Used wherever CIGAR alignments are used.
 type CigarUnit_Operation int32
@@ -93,24 +99,48 @@ var CigarUnit_Operation_value = map[string]int32{
 func (x CigarUnit_Operation) String() string {
 	return proto.EnumName(CigarUnit_Operation_name, int32(x))
 }
-func (CigarUnit_Operation) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{0, 0} }
+func (CigarUnit_Operation) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_cigar_ce8c8036b76f9461, []int{0, 0}
+}
 
 // A single CIGAR operation.
 type CigarUnit struct {
-	Operation CigarUnit_Operation `protobuf:"varint,1,opt,name=operation,enum=google.genomics.v1.CigarUnit_Operation" json:"operation,omitempty"`
+	Operation CigarUnit_Operation `protobuf:"varint,1,opt,name=operation,proto3,enum=google.genomics.v1.CigarUnit_Operation" json:"operation,omitempty"`
 	// The number of genomic bases that the operation runs for. Required.
-	OperationLength int64 `protobuf:"varint,2,opt,name=operation_length,json=operationLength" json:"operation_length,omitempty"`
+	OperationLength int64 `protobuf:"varint,2,opt,name=operation_length,json=operationLength,proto3" json:"operation_length,omitempty"`
 	// `referenceSequence` is only used at mismatches
 	// (`SEQUENCE_MISMATCH`) and deletions (`DELETE`).
 	// Filling this field replaces SAM's MD tag. If the relevant information is
 	// not available, this field is unset.
-	ReferenceSequence string `protobuf:"bytes,3,opt,name=reference_sequence,json=referenceSequence" json:"reference_sequence,omitempty"`
+	ReferenceSequence string `protobuf:"bytes,3,opt,name=reference_sequence,json=referenceSequence,proto3" json:"reference_sequence,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
 }
 
-func (m *CigarUnit) Reset()                    { *m = CigarUnit{} }
-func (m *CigarUnit) String() string            { return proto.CompactTextString(m) }
-func (*CigarUnit) ProtoMessage()               {}
-func (*CigarUnit) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} }
+func (m *CigarUnit) Reset()         { *m = CigarUnit{} }
+func (m *CigarUnit) String() string { return proto.CompactTextString(m) }
+func (*CigarUnit) ProtoMessage()    {}
+func (*CigarUnit) Descriptor() ([]byte, []int) {
+	return fileDescriptor_cigar_ce8c8036b76f9461, []int{0}
+}
+func (m *CigarUnit) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CigarUnit.Unmarshal(m, b)
+}
+func (m *CigarUnit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CigarUnit.Marshal(b, m, deterministic)
+}
+func (dst *CigarUnit) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CigarUnit.Merge(dst, src)
+}
+func (m *CigarUnit) XXX_Size() int {
+	return xxx_messageInfo_CigarUnit.Size(m)
+}
+func (m *CigarUnit) XXX_DiscardUnknown() {
+	xxx_messageInfo_CigarUnit.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CigarUnit proto.InternalMessageInfo
 
 func (m *CigarUnit) GetOperation() CigarUnit_Operation {
 	if m != nil {
@@ -138,9 +168,11 @@ func init() {
 	proto.RegisterEnum("google.genomics.v1.CigarUnit_Operation", CigarUnit_Operation_name, CigarUnit_Operation_value)
 }
 
-func init() { proto.RegisterFile("google/genomics/v1/cigar.proto", fileDescriptor1) }
+func init() {
+	proto.RegisterFile("google/genomics/v1/cigar.proto", fileDescriptor_cigar_ce8c8036b76f9461)
+}
 
-var fileDescriptor1 = []byte{
+var fileDescriptor_cigar_ce8c8036b76f9461 = []byte{
 	// 367 bytes of a gzipped FileDescriptorProto
 	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x51, 0xcf, 0x0e, 0x93, 0x30,
 	0x1c, 0xb6, 0x63, 0x6e, 0xe3, 0x97, 0xb8, 0x75, 0x35, 0x33, 0xd3, 0x18, 0xb3, 0xec, 0xe2, 0x3c,
369  vendor/google.golang.org/genproto/googleapis/genomics/v1/datasets.pb.go  (generated, vendored)
[369-line diff: regenerated protoc-gen-go output. The package clause gains an import annotation; field tags gain proto3 markers; each message (Dataset, ListDatasetsRequest, ListDatasetsResponse, CreateDatasetRequest, UpdateDatasetRequest, DeleteDatasetRequest, UndeleteDatasetRequest, GetDatasetRequest) gains XXX_NoUnkeyedLiteral/XXX_unrecognized/XXX_sizecache fields, XXX_Unmarshal/XXX_Marshal/XXX_Merge/XXX_Size/XXX_DiscardUnknown methods, an xxx_messageInfo_* var, and a multi-line Descriptor returning fileDescriptor_datasets_6135563a56d7e4f8 (formerly fileDescriptor2); the google_protobuf1, google_protobuf2, google_protobuf6, google_iam_v1, and google_iam_v11 imports are replaced by the package-named imports empty, field_mask, timestamp, and v1 "google.golang.org/genproto/googleapis/iam/v1"; the DatasetServiceV1 client stubs call c.cc.Invoke instead of grpc.Invoke, and the client/server API comments are rewritten for DatasetServiceV1Client and DatasetServiceV1Server.]
117  vendor/google.golang.org/genproto/googleapis/genomics/v1/operations.pb.go  (generated, vendored)
[117-line diff: the same regeneration pattern. The package clause gains an import annotation; field tags gain proto3 markers; the google_protobuf5 and google_protobuf6 imports are replaced by any and timestamp; OperationMetadata and OperationEvent gain the XXX_* fields and methods plus xxx_messageInfo_* vars; fileDescriptor3 becomes fileDescriptor_operations_4f155d6eb213ff75.]
50  vendor/google.golang.org/genproto/googleapis/genomics/v1/position.pb.go  (generated, vendored)
@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/genomics/v1/position.proto

package genomics
package genomics // import "google.golang.org/genproto/googleapis/genomics/v1"

import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
@ -13,24 +13,52 @@ var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package

// An abstraction for referring to a genomic position, in relation to some
// already known reference. For now, represents a genomic position as a
// reference name, a base number on that reference (0-based), and a
// determination of forward or reverse strand.
type Position struct {
// The name of the reference in whatever reference set is being used.
ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName" json:"reference_name,omitempty"`
ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"`
// The 0-based offset from the start of the forward strand for that reference.
Position int64 `protobuf:"varint,2,opt,name=position" json:"position,omitempty"`
Position int64 `protobuf:"varint,2,opt,name=position,proto3" json:"position,omitempty"`
// Whether this position is on the reverse strand, as opposed to the forward
// strand.
ReverseStrand bool `protobuf:"varint,3,opt,name=reverse_strand,json=reverseStrand" json:"reverse_strand,omitempty"`
ReverseStrand bool `protobuf:"varint,3,opt,name=reverse_strand,json=reverseStrand,proto3" json:"reverse_strand,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Position) Reset() { *m = Position{} }
func (m *Position) String() string { return proto.CompactTextString(m) }
func (*Position) ProtoMessage() {}
func (*Position) Descriptor() ([]byte, []int) { return fileDescriptor4, []int{0} }
func (m *Position) Reset() { *m = Position{} }
func (m *Position) String() string { return proto.CompactTextString(m) }
func (*Position) ProtoMessage() {}
func (*Position) Descriptor() ([]byte, []int) {
return fileDescriptor_position_a03b9f5830126916, []int{0}
}
func (m *Position) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Position.Unmarshal(m, b)
}
func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Position.Marshal(b, m, deterministic)
}
func (dst *Position) XXX_Merge(src proto.Message) {
xxx_messageInfo_Position.Merge(dst, src)
}
func (m *Position) XXX_Size() int {
return xxx_messageInfo_Position.Size(m)
}
func (m *Position) XXX_DiscardUnknown() {
xxx_messageInfo_Position.DiscardUnknown(m)
}

var xxx_messageInfo_Position proto.InternalMessageInfo

func (m *Position) GetReferenceName() string {
if m != nil {
@ -57,9 +85,11 @@ func init() {
proto.RegisterType((*Position)(nil), "google.genomics.v1.Position")
}

func init() { proto.RegisterFile("google/genomics/v1/position.proto", fileDescriptor4) }
func init() {
proto.RegisterFile("google/genomics/v1/position.proto", fileDescriptor_position_a03b9f5830126916)
}

var fileDescriptor4 = []byte{
var fileDescriptor_position_a03b9f5830126916 = []byte{
// 223 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x41, 0x4b, 0x03, 0x31,
0x14, 0x84, 0x89, 0x05, 0x59, 0x03, 0xf5, 0xb0, 0x07, 0x59, 0x8a, 0x87, 0x55, 0x10, 0xf6, 0x94,
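A minimal usage sketch of the generated Position type above: the coordinate is 0-based, the strand is carried as a boolean, and the generated getters are nil-safe. The import alias and field values below are illustrative assumptions, not part of the generated file.

// Sketch: constructing a 0-based, strand-aware Position and reading it back
// through the generated getters shown in the diff above.
package main

import (
	"fmt"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

func main() {
	pos := &genomics.Position{
		ReferenceName: "chr1",
		Position:      999,  // 0-based offset; base 1000 in 1-based coordinates
		ReverseStrand: true, // the read maps to the reverse strand
	}
	// The generated getters guard against a nil receiver, so they are safe to
	// call even on an unset *Position.
	fmt.Println(pos.GetReferenceName(), pos.GetPosition(), pos.GetReverseStrand())
}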
50
vendor/google.golang.org/genproto/googleapis/genomics/v1/range.pb.go
generated
vendored
50
vendor/google.golang.org/genproto/googleapis/genomics/v1/range.pb.go
generated
vendored
@ -1,7 +1,7 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/range.proto
|
||||
|
||||
package genomics
|
||||
package genomics // import "google.golang.org/genproto/googleapis/genomics/v1"
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
@ -13,21 +13,49 @@ var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A 0-based half-open genomic coordinate range for search requests.
|
||||
type Range struct {
|
||||
// The reference sequence name, for example `chr1`,
|
||||
// `1`, or `chrX`.
|
||||
ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName" json:"reference_name,omitempty"`
|
||||
ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"`
|
||||
// The start position of the range on the reference, 0-based inclusive.
|
||||
Start int64 `protobuf:"varint,2,opt,name=start" json:"start,omitempty"`
|
||||
Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"`
|
||||
// The end position of the range on the reference, 0-based exclusive.
|
||||
End int64 `protobuf:"varint,3,opt,name=end" json:"end,omitempty"`
|
||||
End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Range) Reset() { *m = Range{} }
|
||||
func (m *Range) String() string { return proto.CompactTextString(m) }
|
||||
func (*Range) ProtoMessage() {}
|
||||
func (*Range) Descriptor() ([]byte, []int) { return fileDescriptor5, []int{0} }
|
||||
func (m *Range) Reset() { *m = Range{} }
|
||||
func (m *Range) String() string { return proto.CompactTextString(m) }
|
||||
func (*Range) ProtoMessage() {}
|
||||
func (*Range) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_range_ea4bc4104a5a55de, []int{0}
|
||||
}
|
||||
func (m *Range) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Range.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Range.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *Range) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Range.Merge(dst, src)
|
||||
}
|
||||
func (m *Range) XXX_Size() int {
|
||||
return xxx_messageInfo_Range.Size(m)
|
||||
}
|
||||
func (m *Range) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Range.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Range proto.InternalMessageInfo
|
||||
|
||||
func (m *Range) GetReferenceName() string {
|
||||
if m != nil {
|
||||
@ -54,9 +82,11 @@ func init() {
|
||||
proto.RegisterType((*Range)(nil), "google.genomics.v1.Range")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/range.proto", fileDescriptor5) }
|
||||
func init() {
|
||||
proto.RegisterFile("google/genomics/v1/range.proto", fileDescriptor_range_ea4bc4104a5a55de)
|
||||
}
|
||||
|
||||
var fileDescriptor5 = []byte{
|
||||
var fileDescriptor_range_ea4bc4104a5a55de = []byte{
|
||||
// 209 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x4f, 0x4d, 0x4b, 0xc4, 0x30,
|
||||
0x10, 0x25, 0x96, 0x15, 0x0c, 0x28, 0x12, 0x44, 0x8a, 0x88, 0x2c, 0x82, 0xb0, 0xa7, 0x84, 0xe2,
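Because the Range message above is 0-based and half-open (start inclusive, end exclusive), its length is simply End minus Start and membership tests use a strict upper bound. A small illustrative sketch; the helper names are assumptions, not package API.

// Sketch: half-open interval arithmetic on the generated Range type.
package main

import (
	"fmt"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// rangeLength returns the number of bases covered by a 0-based half-open range.
func rangeLength(r *genomics.Range) int64 {
	return r.GetEnd() - r.GetStart()
}

// contains reports whether position p (0-based) falls inside the range.
func contains(r *genomics.Range, p int64) bool {
	return p >= r.GetStart() && p < r.GetEnd()
}

func main() {
	r := &genomics.Range{ReferenceName: "chr1", Start: 1000, End: 2000}
	fmt.Println(rangeLength(r), contains(r, 1999), contains(r, 2000)) // 1000 true false
}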
119
vendor/google.golang.org/genproto/googleapis/genomics/v1/readalignment.pb.go
generated
vendored
119
vendor/google.golang.org/genproto/googleapis/genomics/v1/readalignment.pb.go
generated
vendored
@ -1,39 +1,67 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/readalignment.proto
|
||||
|
||||
package genomics
|
||||
package genomics // import "google.golang.org/genproto/googleapis/genomics/v1"
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
import _struct "github.com/golang/protobuf/ptypes/struct"
|
||||
import _ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
import google_protobuf3 "github.com/golang/protobuf/ptypes/struct"
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A linear alignment can be represented by one CIGAR string. Describes the
|
||||
// mapped position and local alignment of the read to the reference.
|
||||
type LinearAlignment struct {
|
||||
// The position of this alignment.
|
||||
Position *Position `protobuf:"bytes,1,opt,name=position" json:"position,omitempty"`
|
||||
Position *Position `protobuf:"bytes,1,opt,name=position,proto3" json:"position,omitempty"`
|
||||
// The mapping quality of this alignment. Represents how likely
|
||||
// the read maps to this position as opposed to other locations.
|
||||
//
|
||||
// Specifically, this is -10 log10 Pr(mapping position is wrong), rounded to
|
||||
// the nearest integer.
|
||||
MappingQuality int32 `protobuf:"varint,2,opt,name=mapping_quality,json=mappingQuality" json:"mapping_quality,omitempty"`
|
||||
MappingQuality int32 `protobuf:"varint,2,opt,name=mapping_quality,json=mappingQuality,proto3" json:"mapping_quality,omitempty"`
|
||||
// Represents the local alignment of this sequence (alignment matches, indels,
|
||||
// etc) against the reference.
|
||||
Cigar []*CigarUnit `protobuf:"bytes,3,rep,name=cigar" json:"cigar,omitempty"`
|
||||
Cigar []*CigarUnit `protobuf:"bytes,3,rep,name=cigar,proto3" json:"cigar,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
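The MappingQuality comment above defines the stored value as -10 * log10 Pr(mapping position is wrong), rounded to the nearest integer, so a mapping quality of 30 corresponds to roughly a 1-in-1000 chance that the reported position is wrong. A small numeric sketch; the helper functions are illustrative and not part of this package.

// Sketch: converting between the Phred-scaled MappingQuality documented above
// and the error probability it encodes. Rounding makes the inverse approximate.
package main

import (
	"fmt"
	"math"
)

// phred returns round(-10 * log10(pWrong)), the value stored in MappingQuality.
func phred(pWrong float64) int32 {
	return int32(math.Round(-10 * math.Log10(pWrong)))
}

// errorProb inverts the encoding: MQ 30 maps back to a probability of 0.001.
func errorProb(mq int32) float64 {
	return math.Pow(10, -float64(mq)/10)
}

func main() {
	fmt.Println(phred(0.001))  // 30
	fmt.Println(errorProb(30)) // 0.001
}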
func (m *LinearAlignment) Reset() { *m = LinearAlignment{} }
|
||||
func (m *LinearAlignment) String() string { return proto.CompactTextString(m) }
|
||||
func (*LinearAlignment) ProtoMessage() {}
|
||||
func (*LinearAlignment) Descriptor() ([]byte, []int) { return fileDescriptor6, []int{0} }
|
||||
func (m *LinearAlignment) Reset() { *m = LinearAlignment{} }
|
||||
func (m *LinearAlignment) String() string { return proto.CompactTextString(m) }
|
||||
func (*LinearAlignment) ProtoMessage() {}
|
||||
func (*LinearAlignment) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_readalignment_b0fdaef32d6e6f98, []int{0}
|
||||
}
|
||||
func (m *LinearAlignment) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_LinearAlignment.Unmarshal(m, b)
|
||||
}
|
||||
func (m *LinearAlignment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_LinearAlignment.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *LinearAlignment) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_LinearAlignment.Merge(dst, src)
|
||||
}
|
||||
func (m *LinearAlignment) XXX_Size() int {
|
||||
return xxx_messageInfo_LinearAlignment.Size(m)
|
||||
}
|
||||
func (m *LinearAlignment) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_LinearAlignment.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_LinearAlignment proto.InternalMessageInfo
|
||||
|
||||
func (m *LinearAlignment) GetPosition() *Position {
|
||||
if m != nil {
|
||||
@ -141,42 +169,42 @@ func (m *LinearAlignment) GetCigar() []*CigarUnit {
|
||||
type Read struct {
|
||||
// The server-generated read ID, unique across all reads. This is different
|
||||
// from the `fragmentName`.
|
||||
Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The ID of the read group this read belongs to. A read belongs to exactly
|
||||
// one read group. This is a server-generated ID which is distinct from SAM's
|
||||
// RG tag (for that value, see
|
||||
// [ReadGroup.name][google.genomics.v1.ReadGroup.name]).
|
||||
ReadGroupId string `protobuf:"bytes,2,opt,name=read_group_id,json=readGroupId" json:"read_group_id,omitempty"`
|
||||
ReadGroupId string `protobuf:"bytes,2,opt,name=read_group_id,json=readGroupId,proto3" json:"read_group_id,omitempty"`
|
||||
// The ID of the read group set this read belongs to. A read belongs to
|
||||
// exactly one read group set.
|
||||
ReadGroupSetId string `protobuf:"bytes,3,opt,name=read_group_set_id,json=readGroupSetId" json:"read_group_set_id,omitempty"`
|
||||
ReadGroupSetId string `protobuf:"bytes,3,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"`
|
||||
// The fragment name. Equivalent to QNAME (query template name) in SAM.
|
||||
FragmentName string `protobuf:"bytes,4,opt,name=fragment_name,json=fragmentName" json:"fragment_name,omitempty"`
|
||||
FragmentName string `protobuf:"bytes,4,opt,name=fragment_name,json=fragmentName,proto3" json:"fragment_name,omitempty"`
|
||||
// The orientation and the distance between reads from the fragment are
|
||||
// consistent with the sequencing protocol (SAM flag 0x2).
|
||||
ProperPlacement bool `protobuf:"varint,5,opt,name=proper_placement,json=properPlacement" json:"proper_placement,omitempty"`
|
||||
ProperPlacement bool `protobuf:"varint,5,opt,name=proper_placement,json=properPlacement,proto3" json:"proper_placement,omitempty"`
|
||||
// The fragment is a PCR or optical duplicate (SAM flag 0x400).
|
||||
DuplicateFragment bool `protobuf:"varint,6,opt,name=duplicate_fragment,json=duplicateFragment" json:"duplicate_fragment,omitempty"`
|
||||
DuplicateFragment bool `protobuf:"varint,6,opt,name=duplicate_fragment,json=duplicateFragment,proto3" json:"duplicate_fragment,omitempty"`
|
||||
// The observed length of the fragment, equivalent to TLEN in SAM.
|
||||
FragmentLength int32 `protobuf:"varint,7,opt,name=fragment_length,json=fragmentLength" json:"fragment_length,omitempty"`
|
||||
FragmentLength int32 `protobuf:"varint,7,opt,name=fragment_length,json=fragmentLength,proto3" json:"fragment_length,omitempty"`
|
||||
// The read number in sequencing. 0-based and less than numberReads. This
|
||||
// field replaces SAM flag 0x40 and 0x80.
|
||||
ReadNumber int32 `protobuf:"varint,8,opt,name=read_number,json=readNumber" json:"read_number,omitempty"`
|
||||
ReadNumber int32 `protobuf:"varint,8,opt,name=read_number,json=readNumber,proto3" json:"read_number,omitempty"`
|
||||
// The number of reads in the fragment (extension to SAM flag 0x1).
|
||||
NumberReads int32 `protobuf:"varint,9,opt,name=number_reads,json=numberReads" json:"number_reads,omitempty"`
|
||||
NumberReads int32 `protobuf:"varint,9,opt,name=number_reads,json=numberReads,proto3" json:"number_reads,omitempty"`
|
||||
// Whether this read did not pass filters, such as platform or vendor quality
|
||||
// controls (SAM flag 0x200).
|
||||
FailedVendorQualityChecks bool `protobuf:"varint,10,opt,name=failed_vendor_quality_checks,json=failedVendorQualityChecks" json:"failed_vendor_quality_checks,omitempty"`
|
||||
FailedVendorQualityChecks bool `protobuf:"varint,10,opt,name=failed_vendor_quality_checks,json=failedVendorQualityChecks,proto3" json:"failed_vendor_quality_checks,omitempty"`
|
||||
// The linear alignment for this alignment record. This field is null for
|
||||
// unmapped reads.
|
||||
Alignment *LinearAlignment `protobuf:"bytes,11,opt,name=alignment" json:"alignment,omitempty"`
|
||||
Alignment *LinearAlignment `protobuf:"bytes,11,opt,name=alignment,proto3" json:"alignment,omitempty"`
|
||||
// Whether this alignment is secondary. Equivalent to SAM flag 0x100.
|
||||
// A secondary alignment represents an alternative to the primary alignment
|
||||
// for this read. Aligners may return secondary alignments if a read can map
|
||||
// ambiguously to multiple coordinates in the genome. By convention, each read
|
||||
// has one and only one alignment where both `secondaryAlignment`
|
||||
// and `supplementaryAlignment` are false.
|
||||
SecondaryAlignment bool `protobuf:"varint,12,opt,name=secondary_alignment,json=secondaryAlignment" json:"secondary_alignment,omitempty"`
|
||||
SecondaryAlignment bool `protobuf:"varint,12,opt,name=secondary_alignment,json=secondaryAlignment,proto3" json:"secondary_alignment,omitempty"`
|
||||
// Whether this alignment is supplementary. Equivalent to SAM flag 0x800.
|
||||
// Supplementary alignments are used in the representation of a chimeric
|
||||
// alignment. In a chimeric alignment, a read is split into multiple
|
||||
@ -188,7 +216,7 @@ type Read struct {
|
||||
// will be hard clipped. The `alignedSequence` and
|
||||
// `alignedQuality` fields in the alignment record will only
|
||||
// represent the bases for its respective linear alignment.
|
||||
SupplementaryAlignment bool `protobuf:"varint,13,opt,name=supplementary_alignment,json=supplementaryAlignment" json:"supplementary_alignment,omitempty"`
|
||||
SupplementaryAlignment bool `protobuf:"varint,13,opt,name=supplementary_alignment,json=supplementaryAlignment,proto3" json:"supplementary_alignment,omitempty"`
|
||||
// The bases of the read sequence contained in this alignment record,
|
||||
// **without CIGAR operations applied** (equivalent to SEQ in SAM).
|
||||
// `alignedSequence` and `alignedQuality` may be
|
||||
@ -196,7 +224,7 @@ type Read struct {
|
||||
// alignment is part of a chimeric alignment, or if the read was trimmed. When
|
||||
// this occurs, the CIGAR for this read will begin/end with a hard clip
|
||||
// operator that will indicate the length of the excised sequence.
|
||||
AlignedSequence string `protobuf:"bytes,14,opt,name=aligned_sequence,json=alignedSequence" json:"aligned_sequence,omitempty"`
|
||||
AlignedSequence string `protobuf:"bytes,14,opt,name=aligned_sequence,json=alignedSequence,proto3" json:"aligned_sequence,omitempty"`
|
||||
// The quality of the read sequence contained in this alignment record
|
||||
// (equivalent to QUAL in SAM).
|
||||
// `alignedSequence` and `alignedQuality` may be shorter than the full read
|
||||
@ -204,20 +232,42 @@ type Read struct {
|
||||
// chimeric alignment, or if the read was trimmed. When this occurs, the CIGAR
|
||||
// for this read will begin/end with a hard clip operator that will indicate
|
||||
// the length of the excised sequence.
|
||||
AlignedQuality []int32 `protobuf:"varint,15,rep,packed,name=aligned_quality,json=alignedQuality" json:"aligned_quality,omitempty"`
|
||||
AlignedQuality []int32 `protobuf:"varint,15,rep,packed,name=aligned_quality,json=alignedQuality,proto3" json:"aligned_quality,omitempty"`
|
||||
// The mapping of the primary alignment of the
|
||||
// `(readNumber+1)%numberReads` read in the fragment. It replaces
|
||||
// mate position and mate strand in SAM.
|
||||
NextMatePosition *Position `protobuf:"bytes,16,opt,name=next_mate_position,json=nextMatePosition" json:"next_mate_position,omitempty"`
|
||||
NextMatePosition *Position `protobuf:"bytes,16,opt,name=next_mate_position,json=nextMatePosition,proto3" json:"next_mate_position,omitempty"`
|
||||
// A map of additional read alignment information. This must be of the form
|
||||
// map<string, string[]> (string key mapping to a list of string values).
|
||||
Info map[string]*google_protobuf3.ListValue `protobuf:"bytes,17,rep,name=info" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
Info map[string]*_struct.ListValue `protobuf:"bytes,17,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Read) Reset() { *m = Read{} }
|
||||
func (m *Read) String() string { return proto.CompactTextString(m) }
|
||||
func (*Read) ProtoMessage() {}
|
||||
func (*Read) Descriptor() ([]byte, []int) { return fileDescriptor6, []int{1} }
|
||||
func (m *Read) Reset() { *m = Read{} }
|
||||
func (m *Read) String() string { return proto.CompactTextString(m) }
|
||||
func (*Read) ProtoMessage() {}
|
||||
func (*Read) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_readalignment_b0fdaef32d6e6f98, []int{1}
|
||||
}
|
||||
func (m *Read) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Read.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Read) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Read.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *Read) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Read.Merge(dst, src)
|
||||
}
|
||||
func (m *Read) XXX_Size() int {
|
||||
return xxx_messageInfo_Read.Size(m)
|
||||
}
|
||||
func (m *Read) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Read.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Read proto.InternalMessageInfo
|
||||
|
||||
func (m *Read) GetId() string {
|
||||
if m != nil {
|
||||
@ -331,7 +381,7 @@ func (m *Read) GetNextMatePosition() *Position {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Read) GetInfo() map[string]*google_protobuf3.ListValue {
|
||||
func (m *Read) GetInfo() map[string]*_struct.ListValue {
|
||||
if m != nil {
|
||||
return m.Info
|
||||
}
|
||||
@ -341,11 +391,14 @@ func (m *Read) GetInfo() map[string]*google_protobuf3.ListValue {
|
||||
func init() {
|
||||
proto.RegisterType((*LinearAlignment)(nil), "google.genomics.v1.LinearAlignment")
|
||||
proto.RegisterType((*Read)(nil), "google.genomics.v1.Read")
|
||||
proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.Read.InfoEntry")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/readalignment.proto", fileDescriptor6) }
|
||||
func init() {
|
||||
proto.RegisterFile("google/genomics/v1/readalignment.proto", fileDescriptor_readalignment_b0fdaef32d6e6f98)
|
||||
}
|
||||
|
||||
var fileDescriptor6 = []byte{
|
||||
var fileDescriptor_readalignment_b0fdaef32d6e6f98 = []byte{
|
||||
// 683 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0xcd, 0x4e, 0xdb, 0x4a,
|
||||
0x14, 0xc7, 0xe5, 0x84, 0x70, 0xc9, 0x09, 0x24, 0x61, 0xae, 0xc4, 0xf5, 0x8d, 0xb8, 0xb7, 0x21,
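The Info field of Read above encodes map<string, string[]> using ListValue from github.com/golang/protobuf/ptypes/struct, the package this file imports under the _struct alias. A hedged sketch of populating it; the stringList helper and the field values are illustrative assumptions.

// Sketch: filling the map<string, string[]> Info field of a Read with the
// ptypes/struct types that this generated file already imports.
package main

import (
	"fmt"

	_struct "github.com/golang/protobuf/ptypes/struct"
	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// stringList wraps plain strings in the ListValue shape the Info map expects.
func stringList(values ...string) *_struct.ListValue {
	lv := &_struct.ListValue{}
	for _, v := range values {
		lv.Values = append(lv.Values, &_struct.Value{
			Kind: &_struct.Value_StringValue{StringValue: v},
		})
	}
	return lv
}

func main() {
	read := &genomics.Read{
		FragmentName: "frag-1", // illustrative value
		Info: map[string]*_struct.ListValue{
			"comments": stringList("duplicate of read X", "manually reviewed"),
		},
	}
	fmt.Println(len(read.GetInfo()["comments"].GetValues())) // 2
}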
147
vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroup.pb.go
generated
vendored
147
vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroup.pb.go
generated
vendored
@ -1,54 +1,82 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/readgroup.proto
|
||||
|
||||
package genomics
|
||||
package genomics // import "google.golang.org/genproto/googleapis/genomics/v1"
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
import _struct "github.com/golang/protobuf/ptypes/struct"
|
||||
import _ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
import google_protobuf3 "github.com/golang/protobuf/ptypes/struct"
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A read group is all the data that's processed the same way by the sequencer.
|
||||
type ReadGroup struct {
|
||||
// The server-generated read group ID, unique for all read groups.
|
||||
// Note: This is different than the @RG ID field in the SAM spec. For that
|
||||
// value, see [name][google.genomics.v1.ReadGroup.name].
|
||||
Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The dataset to which this read group belongs.
|
||||
DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId" json:"dataset_id,omitempty"`
|
||||
DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
|
||||
// The read group name. This corresponds to the @RG ID field in the SAM spec.
|
||||
Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"`
|
||||
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
|
||||
// A free-form text description of this read group.
|
||||
Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"`
|
||||
Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
|
||||
// A client-supplied sample identifier for the reads in this read group.
|
||||
SampleId string `protobuf:"bytes,5,opt,name=sample_id,json=sampleId" json:"sample_id,omitempty"`
|
||||
SampleId string `protobuf:"bytes,5,opt,name=sample_id,json=sampleId,proto3" json:"sample_id,omitempty"`
|
||||
// The experiment used to generate this read group.
|
||||
Experiment *ReadGroup_Experiment `protobuf:"bytes,6,opt,name=experiment" json:"experiment,omitempty"`
|
||||
Experiment *ReadGroup_Experiment `protobuf:"bytes,6,opt,name=experiment,proto3" json:"experiment,omitempty"`
|
||||
// The predicted insert size of this read group. The insert size is the length
|
||||
// the sequenced DNA fragment from end-to-end, not including the adapters.
|
||||
PredictedInsertSize int32 `protobuf:"varint,7,opt,name=predicted_insert_size,json=predictedInsertSize" json:"predicted_insert_size,omitempty"`
|
||||
PredictedInsertSize int32 `protobuf:"varint,7,opt,name=predicted_insert_size,json=predictedInsertSize,proto3" json:"predicted_insert_size,omitempty"`
|
||||
// The programs used to generate this read group. Programs are always
|
||||
// identical for all read groups within a read group set. For this reason,
|
||||
// only the first read group in a returned set will have this field
|
||||
// populated.
|
||||
Programs []*ReadGroup_Program `protobuf:"bytes,10,rep,name=programs" json:"programs,omitempty"`
|
||||
Programs []*ReadGroup_Program `protobuf:"bytes,10,rep,name=programs,proto3" json:"programs,omitempty"`
|
||||
// The reference set the reads in this read group are aligned to.
|
||||
ReferenceSetId string `protobuf:"bytes,11,opt,name=reference_set_id,json=referenceSetId" json:"reference_set_id,omitempty"`
|
||||
ReferenceSetId string `protobuf:"bytes,11,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"`
|
||||
// A map of additional read group information. This must be of the form
|
||||
// map<string, string[]> (string key mapping to a list of string values).
|
||||
Info map[string]*google_protobuf3.ListValue `protobuf:"bytes,12,rep,name=info" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
Info map[string]*_struct.ListValue `protobuf:"bytes,12,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ReadGroup) Reset() { *m = ReadGroup{} }
|
||||
func (m *ReadGroup) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup) ProtoMessage() {}
|
||||
func (*ReadGroup) Descriptor() ([]byte, []int) { return fileDescriptor7, []int{0} }
|
||||
func (m *ReadGroup) Reset() { *m = ReadGroup{} }
|
||||
func (m *ReadGroup) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup) ProtoMessage() {}
|
||||
func (*ReadGroup) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_readgroup_d1ee37f21a1efad6, []int{0}
|
||||
}
|
||||
func (m *ReadGroup) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ReadGroup.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ReadGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ReadGroup.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *ReadGroup) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReadGroup.Merge(dst, src)
|
||||
}
|
||||
func (m *ReadGroup) XXX_Size() int {
|
||||
return xxx_messageInfo_ReadGroup.Size(m)
|
||||
}
|
||||
func (m *ReadGroup) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReadGroup.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReadGroup proto.InternalMessageInfo
|
||||
|
||||
func (m *ReadGroup) GetId() string {
|
||||
if m != nil {
|
||||
@ -113,7 +141,7 @@ func (m *ReadGroup) GetReferenceSetId() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetInfo() map[string]*google_protobuf3.ListValue {
|
||||
func (m *ReadGroup) GetInfo() map[string]*_struct.ListValue {
|
||||
if m != nil {
|
||||
return m.Info
|
||||
}
|
||||
@ -125,22 +153,44 @@ type ReadGroup_Experiment struct {
|
||||
// fragments which have been prepared for sequencing from a sample. This
|
||||
// field is important for quality control as error or bias can be introduced
|
||||
// during sample preparation.
|
||||
LibraryId string `protobuf:"bytes,1,opt,name=library_id,json=libraryId" json:"library_id,omitempty"`
|
||||
LibraryId string `protobuf:"bytes,1,opt,name=library_id,json=libraryId,proto3" json:"library_id,omitempty"`
|
||||
// The platform unit used as part of this experiment, for example
|
||||
// flowcell-barcode.lane for Illumina or slide for SOLiD. Corresponds to the
|
||||
// @RG PU field in the SAM spec.
|
||||
PlatformUnit string `protobuf:"bytes,2,opt,name=platform_unit,json=platformUnit" json:"platform_unit,omitempty"`
|
||||
PlatformUnit string `protobuf:"bytes,2,opt,name=platform_unit,json=platformUnit,proto3" json:"platform_unit,omitempty"`
|
||||
// The sequencing center used as part of this experiment.
|
||||
SequencingCenter string `protobuf:"bytes,3,opt,name=sequencing_center,json=sequencingCenter" json:"sequencing_center,omitempty"`
|
||||
SequencingCenter string `protobuf:"bytes,3,opt,name=sequencing_center,json=sequencingCenter,proto3" json:"sequencing_center,omitempty"`
|
||||
// The instrument model used as part of this experiment. This maps to
|
||||
// sequencing technology in the SAM spec.
|
||||
InstrumentModel string `protobuf:"bytes,4,opt,name=instrument_model,json=instrumentModel" json:"instrument_model,omitempty"`
|
||||
InstrumentModel string `protobuf:"bytes,4,opt,name=instrument_model,json=instrumentModel,proto3" json:"instrument_model,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ReadGroup_Experiment) Reset() { *m = ReadGroup_Experiment{} }
|
||||
func (m *ReadGroup_Experiment) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup_Experiment) ProtoMessage() {}
|
||||
func (*ReadGroup_Experiment) Descriptor() ([]byte, []int) { return fileDescriptor7, []int{0, 0} }
|
||||
func (m *ReadGroup_Experiment) Reset() { *m = ReadGroup_Experiment{} }
|
||||
func (m *ReadGroup_Experiment) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup_Experiment) ProtoMessage() {}
|
||||
func (*ReadGroup_Experiment) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_readgroup_d1ee37f21a1efad6, []int{0, 0}
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ReadGroup_Experiment.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ReadGroup_Experiment.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *ReadGroup_Experiment) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReadGroup_Experiment.Merge(dst, src)
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_Size() int {
|
||||
return xxx_messageInfo_ReadGroup_Experiment.Size(m)
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReadGroup_Experiment.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReadGroup_Experiment proto.InternalMessageInfo
|
||||
|
||||
func (m *ReadGroup_Experiment) GetLibraryId() string {
|
||||
if m != nil {
|
||||
@ -172,23 +222,45 @@ func (m *ReadGroup_Experiment) GetInstrumentModel() string {
|
||||
|
||||
type ReadGroup_Program struct {
|
||||
// The command line used to run this program.
|
||||
CommandLine string `protobuf:"bytes,1,opt,name=command_line,json=commandLine" json:"command_line,omitempty"`
|
||||
CommandLine string `protobuf:"bytes,1,opt,name=command_line,json=commandLine,proto3" json:"command_line,omitempty"`
|
||||
// The user specified locally unique ID of the program. Used along with
|
||||
// `prevProgramId` to define an ordering between programs.
|
||||
Id string `protobuf:"bytes,2,opt,name=id" json:"id,omitempty"`
|
||||
Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The display name of the program. This is typically the colloquial name of
|
||||
// the tool used, for example 'bwa' or 'picard'.
|
||||
Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"`
|
||||
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
|
||||
// The ID of the program run before this one.
|
||||
PrevProgramId string `protobuf:"bytes,4,opt,name=prev_program_id,json=prevProgramId" json:"prev_program_id,omitempty"`
|
||||
PrevProgramId string `protobuf:"bytes,4,opt,name=prev_program_id,json=prevProgramId,proto3" json:"prev_program_id,omitempty"`
|
||||
// The version of the program run.
|
||||
Version string `protobuf:"bytes,5,opt,name=version" json:"version,omitempty"`
|
||||
Version string `protobuf:"bytes,5,opt,name=version,proto3" json:"version,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ReadGroup_Program) Reset() { *m = ReadGroup_Program{} }
|
||||
func (m *ReadGroup_Program) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup_Program) ProtoMessage() {}
|
||||
func (*ReadGroup_Program) Descriptor() ([]byte, []int) { return fileDescriptor7, []int{0, 1} }
|
||||
func (m *ReadGroup_Program) Reset() { *m = ReadGroup_Program{} }
|
||||
func (m *ReadGroup_Program) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup_Program) ProtoMessage() {}
|
||||
func (*ReadGroup_Program) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_readgroup_d1ee37f21a1efad6, []int{0, 1}
|
||||
}
|
||||
func (m *ReadGroup_Program) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ReadGroup_Program.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ReadGroup_Program) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ReadGroup_Program.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *ReadGroup_Program) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReadGroup_Program.Merge(dst, src)
|
||||
}
|
||||
func (m *ReadGroup_Program) XXX_Size() int {
|
||||
return xxx_messageInfo_ReadGroup_Program.Size(m)
|
||||
}
|
||||
func (m *ReadGroup_Program) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReadGroup_Program.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReadGroup_Program proto.InternalMessageInfo
|
||||
|
||||
func (m *ReadGroup_Program) GetCommandLine() string {
|
||||
if m != nil {
|
||||
@ -227,13 +299,16 @@ func (m *ReadGroup_Program) GetVersion() string {
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*ReadGroup)(nil), "google.genomics.v1.ReadGroup")
|
||||
proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.ReadGroup.InfoEntry")
|
||||
proto.RegisterType((*ReadGroup_Experiment)(nil), "google.genomics.v1.ReadGroup.Experiment")
|
||||
proto.RegisterType((*ReadGroup_Program)(nil), "google.genomics.v1.ReadGroup.Program")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/readgroup.proto", fileDescriptor7) }
|
||||
func init() {
|
||||
proto.RegisterFile("google/genomics/v1/readgroup.proto", fileDescriptor_readgroup_d1ee37f21a1efad6)
|
||||
}
|
||||
|
||||
var fileDescriptor7 = []byte{
|
||||
var fileDescriptor_readgroup_d1ee37f21a1efad6 = []byte{
|
||||
// 585 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0xcb, 0x6e, 0xd4, 0x30,
|
||||
0x14, 0x55, 0xa6, 0xcf, 0xb9, 0xd3, 0xc7, 0x60, 0x04, 0x8a, 0x06, 0x90, 0x86, 0x22, 0x60, 0x10,
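ReadGroup_Program above chains entries through Id and PrevProgramId to define an ordering between programs. The sketch below walks that chain into execution order under the assumptions, stated here rather than taken from the source, that the chain is linear and acyclic, IDs are unique, and the first program carries an empty PrevProgramId; the helper is illustrative, not package API.

// Sketch: ordering ReadGroup_Program entries by following the PrevProgramId
// chain described in the comments above.
package main

import (
	"fmt"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// orderPrograms returns the programs so that each entry's PrevProgramId refers
// to the entry before it. An empty PrevProgramId marks the first program.
func orderPrograms(programs []*genomics.ReadGroup_Program) []*genomics.ReadGroup_Program {
	byPrev := make(map[string]*genomics.ReadGroup_Program, len(programs))
	for _, p := range programs {
		byPrev[p.GetPrevProgramId()] = p
	}
	ordered := make([]*genomics.ReadGroup_Program, 0, len(programs))
	for p := byPrev[""]; p != nil; p = byPrev[p.GetId()] {
		ordered = append(ordered, p)
	}
	return ordered
}

func main() {
	ps := []*genomics.ReadGroup_Program{
		{Id: "2", Name: "picard", PrevProgramId: "1"},
		{Id: "1", Name: "bwa"},
	}
	for _, p := range orderPrograms(ps) {
		fmt.Println(p.GetName()) // bwa, then picard
	}
}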
63
vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroupset.pb.go
generated
vendored
63
vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroupset.pb.go
generated
vendored
@ -1,19 +1,25 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/readgroupset.proto
|
||||
|
||||
package genomics
|
||||
package genomics // import "google.golang.org/genproto/googleapis/genomics/v1"
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
import _struct "github.com/golang/protobuf/ptypes/struct"
|
||||
import _ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
import google_protobuf3 "github.com/golang/protobuf/ptypes/struct"
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A read group set is a logical collection of read groups, which are
|
||||
// collections of reads produced by a sequencer. A read group set typically
|
||||
// models reads corresponding to one sample, sequenced one way, and aligned one
|
||||
@ -27,27 +33,49 @@ var _ = math.Inf
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
type ReadGroupSet struct {
|
||||
// The server-generated read group set ID, unique for all read group sets.
|
||||
Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The dataset to which this read group set belongs.
|
||||
DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId" json:"dataset_id,omitempty"`
|
||||
DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
|
||||
// The reference set to which the reads in this read group set are aligned.
|
||||
ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId" json:"reference_set_id,omitempty"`
|
||||
ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"`
|
||||
// The read group set name. By default this will be initialized to the sample
|
||||
// name of the sequenced data contained in this set.
|
||||
Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"`
|
||||
Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
|
||||
// The filename of the original source file for this read group set, if any.
|
||||
Filename string `protobuf:"bytes,5,opt,name=filename" json:"filename,omitempty"`
|
||||
Filename string `protobuf:"bytes,5,opt,name=filename,proto3" json:"filename,omitempty"`
|
||||
// The read groups in this set. There are typically 1-10 read groups in a read
|
||||
// group set.
|
||||
ReadGroups []*ReadGroup `protobuf:"bytes,6,rep,name=read_groups,json=readGroups" json:"read_groups,omitempty"`
|
||||
ReadGroups []*ReadGroup `protobuf:"bytes,6,rep,name=read_groups,json=readGroups,proto3" json:"read_groups,omitempty"`
|
||||
// A map of additional read group set information.
|
||||
Info map[string]*google_protobuf3.ListValue `protobuf:"bytes,7,rep,name=info" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
Info map[string]*_struct.ListValue `protobuf:"bytes,7,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ReadGroupSet) Reset() { *m = ReadGroupSet{} }
|
||||
func (m *ReadGroupSet) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroupSet) ProtoMessage() {}
|
||||
func (*ReadGroupSet) Descriptor() ([]byte, []int) { return fileDescriptor8, []int{0} }
|
||||
func (m *ReadGroupSet) Reset() { *m = ReadGroupSet{} }
|
||||
func (m *ReadGroupSet) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroupSet) ProtoMessage() {}
|
||||
func (*ReadGroupSet) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_readgroupset_a1ee2d6c49daf62b, []int{0}
|
||||
}
|
||||
func (m *ReadGroupSet) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ReadGroupSet.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ReadGroupSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ReadGroupSet.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *ReadGroupSet) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReadGroupSet.Merge(dst, src)
|
||||
}
|
||||
func (m *ReadGroupSet) XXX_Size() int {
|
||||
return xxx_messageInfo_ReadGroupSet.Size(m)
|
||||
}
|
||||
func (m *ReadGroupSet) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReadGroupSet.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReadGroupSet proto.InternalMessageInfo
|
||||
|
||||
func (m *ReadGroupSet) GetId() string {
|
||||
if m != nil {
|
||||
@ -91,7 +119,7 @@ func (m *ReadGroupSet) GetReadGroups() []*ReadGroup {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ReadGroupSet) GetInfo() map[string]*google_protobuf3.ListValue {
|
||||
func (m *ReadGroupSet) GetInfo() map[string]*_struct.ListValue {
|
||||
if m != nil {
|
||||
return m.Info
|
||||
}
|
||||
@ -100,11 +128,14 @@ func (m *ReadGroupSet) GetInfo() map[string]*google_protobuf3.ListValue {
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*ReadGroupSet)(nil), "google.genomics.v1.ReadGroupSet")
|
||||
proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.ReadGroupSet.InfoEntry")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/readgroupset.proto", fileDescriptor8) }
|
||||
func init() {
|
||||
proto.RegisterFile("google/genomics/v1/readgroupset.proto", fileDescriptor_readgroupset_a1ee2d6c49daf62b)
|
||||
}
|
||||
|
||||
var fileDescriptor8 = []byte{
|
||||
var fileDescriptor_readgroupset_a1ee2d6c49daf62b = []byte{
|
||||
// 367 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x8b, 0xdb, 0x30,
|
||||
0x10, 0xc5, 0xb1, 0xf3, 0xa7, 0xcd, 0xa4, 0x84, 0x54, 0x87, 0x62, 0x4c, 0x03, 0x21, 0x50, 0x08,
630
vendor/google.golang.org/genproto/googleapis/genomics/v1/reads.pb.go
generated
vendored
630
vendor/google.golang.org/genproto/googleapis/genomics/v1/reads.pb.go
generated
vendored
File diff suppressed because it is too large
Load Diff
410
vendor/google.golang.org/genproto/googleapis/genomics/v1/references.pb.go
generated
vendored
410
vendor/google.golang.org/genproto/googleapis/genomics/v1/references.pb.go
generated
vendored
@ -1,7 +1,7 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/references.proto
|
||||
|
||||
package genomics
|
||||
package genomics // import "google.golang.org/genproto/googleapis/genomics/v1"
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
@ -18,6 +18,12 @@ var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A reference is a canonical assembled DNA sequence, intended to act as a
|
||||
// reference coordinate space for other genomic annotations. A single reference
|
||||
// might represent the human chromosome 1 or mitochandrial DNA, for instance. A
|
||||
@ -27,29 +33,51 @@ var _ = math.Inf
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
type Reference struct {
|
||||
// The server-generated reference ID, unique across all references.
|
||||
Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The length of this reference's sequence.
|
||||
Length int64 `protobuf:"varint,2,opt,name=length" json:"length,omitempty"`
|
||||
Length int64 `protobuf:"varint,2,opt,name=length,proto3" json:"length,omitempty"`
|
||||
// MD5 of the upper-case sequence excluding all whitespace characters (this
|
||||
// is equivalent to SQ:M5 in SAM). This value is represented in lower case
|
||||
// hexadecimal format.
|
||||
Md5Checksum string `protobuf:"bytes,3,opt,name=md5checksum" json:"md5checksum,omitempty"`
|
||||
Md5Checksum string `protobuf:"bytes,3,opt,name=md5checksum,proto3" json:"md5checksum,omitempty"`
|
||||
// The name of this reference, for example `22`.
|
||||
Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"`
|
||||
Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
|
||||
// The URI from which the sequence was obtained. Typically specifies a FASTA
|
||||
// format file.
|
||||
SourceUri string `protobuf:"bytes,5,opt,name=source_uri,json=sourceUri" json:"source_uri,omitempty"`
|
||||
SourceUri string `protobuf:"bytes,5,opt,name=source_uri,json=sourceUri,proto3" json:"source_uri,omitempty"`
|
||||
// All known corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally
|
||||
// with a version number, for example `GCF_000001405.26`.
|
||||
SourceAccessions []string `protobuf:"bytes,6,rep,name=source_accessions,json=sourceAccessions" json:"source_accessions,omitempty"`
|
||||
SourceAccessions []string `protobuf:"bytes,6,rep,name=source_accessions,json=sourceAccessions,proto3" json:"source_accessions,omitempty"`
|
||||
// ID from http://www.ncbi.nlm.nih.gov/taxonomy. For example, 9606 for human.
|
||||
NcbiTaxonId int32 `protobuf:"varint,7,opt,name=ncbi_taxon_id,json=ncbiTaxonId" json:"ncbi_taxon_id,omitempty"`
|
||||
NcbiTaxonId int32 `protobuf:"varint,7,opt,name=ncbi_taxon_id,json=ncbiTaxonId,proto3" json:"ncbi_taxon_id,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
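Per the Md5Checksum comment above, a reference checksum is the MD5 of the upper-case sequence with all whitespace removed, rendered in lower-case hexadecimal (SQ:M5 in SAM). A short sketch of that computation; the helper name is illustrative.

// Sketch: computing a Reference.Md5Checksum as documented above.
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"strings"
	"unicode"
)

func referenceMd5(sequence string) string {
	cleaned := strings.Map(func(r rune) rune {
		if unicode.IsSpace(r) {
			return -1 // drop all whitespace characters
		}
		return unicode.ToUpper(r)
	}, sequence)
	sum := md5.Sum([]byte(cleaned))
	return hex.EncodeToString(sum[:]) // lower-case hex, as required
}

func main() {
	fmt.Println(referenceMd5("acgt acgt\nACGT"))
}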
func (m *Reference) Reset() { *m = Reference{} }
|
||||
func (m *Reference) String() string { return proto.CompactTextString(m) }
|
||||
func (*Reference) ProtoMessage() {}
|
||||
func (*Reference) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{0} }
|
||||
func (m *Reference) Reset() { *m = Reference{} }
|
||||
func (m *Reference) String() string { return proto.CompactTextString(m) }
|
||||
func (*Reference) ProtoMessage() {}
|
||||
func (*Reference) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_references_4f3354422baff2a6, []int{0}
|
||||
}
|
||||
func (m *Reference) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Reference.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Reference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Reference.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *Reference) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Reference.Merge(dst, src)
|
||||
}
|
||||
func (m *Reference) XXX_Size() int {
|
||||
return xxx_messageInfo_Reference.Size(m)
|
||||
}
|
||||
func (m *Reference) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Reference.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Reference proto.InternalMessageInfo
|
||||
|
||||
func (m *Reference) GetId() string {
|
||||
if m != nil {
|
||||
@ -110,37 +138,59 @@ func (m *Reference) GetNcbiTaxonId() int32 {
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
type ReferenceSet struct {
|
||||
// The server-generated reference set ID, unique across all reference sets.
|
||||
Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The IDs of the reference objects that are part of this set.
|
||||
// `Reference.md5checksum` must be unique within this set.
|
||||
ReferenceIds []string `protobuf:"bytes,2,rep,name=reference_ids,json=referenceIds" json:"reference_ids,omitempty"`
|
||||
ReferenceIds []string `protobuf:"bytes,2,rep,name=reference_ids,json=referenceIds,proto3" json:"reference_ids,omitempty"`
|
||||
// Order-independent MD5 checksum which identifies this reference set. The
|
||||
// checksum is computed by sorting all lower case hexidecimal string
|
||||
// `reference.md5checksum` (for all reference in this set) in
|
||||
// ascending lexicographic order, concatenating, and taking the MD5 of that
|
||||
// value. The resulting value is represented in lower case hexadecimal format.
|
||||
Md5Checksum string `protobuf:"bytes,3,opt,name=md5checksum" json:"md5checksum,omitempty"`
|
||||
Md5Checksum string `protobuf:"bytes,3,opt,name=md5checksum,proto3" json:"md5checksum,omitempty"`
|
||||
// ID from http://www.ncbi.nlm.nih.gov/taxonomy (for example, 9606 for human)
|
||||
// indicating the species which this reference set is intended to model. Note
|
||||
// that contained references may specify a different `ncbiTaxonId`, as
|
||||
// assemblies may contain reference sequences which do not belong to the
|
||||
// modeled species, for example EBV in a human reference genome.
|
||||
NcbiTaxonId int32 `protobuf:"varint,4,opt,name=ncbi_taxon_id,json=ncbiTaxonId" json:"ncbi_taxon_id,omitempty"`
|
||||
NcbiTaxonId int32 `protobuf:"varint,4,opt,name=ncbi_taxon_id,json=ncbiTaxonId,proto3" json:"ncbi_taxon_id,omitempty"`
|
||||
// Free text description of this reference set.
|
||||
Description string `protobuf:"bytes,5,opt,name=description" json:"description,omitempty"`
|
||||
Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"`
|
||||
// Public id of this reference set, such as `GRCh37`.
|
||||
AssemblyId string `protobuf:"bytes,6,opt,name=assembly_id,json=assemblyId" json:"assembly_id,omitempty"`
|
||||
AssemblyId string `protobuf:"bytes,6,opt,name=assembly_id,json=assemblyId,proto3" json:"assembly_id,omitempty"`
|
||||
// The URI from which the references were obtained.
|
||||
SourceUri string `protobuf:"bytes,7,opt,name=source_uri,json=sourceUri" json:"source_uri,omitempty"`
|
||||
SourceUri string `protobuf:"bytes,7,opt,name=source_uri,json=sourceUri,proto3" json:"source_uri,omitempty"`
|
||||
// All known corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally
|
||||
// with a version number, for example `NC_000001.11`.
|
||||
SourceAccessions []string `protobuf:"bytes,8,rep,name=source_accessions,json=sourceAccessions" json:"source_accessions,omitempty"`
|
||||
SourceAccessions []string `protobuf:"bytes,8,rep,name=source_accessions,json=sourceAccessions,proto3" json:"source_accessions,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
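The ReferenceSet.Md5Checksum comment above describes an order-independent digest: sort the member references' lower-case hexadecimal md5checksums in ascending lexicographic order, concatenate them, and take the MD5 of the result. A sketch under that reading; the helper is illustrative, not package API.

// Sketch: the order-independent checksum over member reference checksums
// described in the ReferenceSet comments above.
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"sort"
	"strings"
)

func referenceSetMd5(referenceChecksums []string) string {
	sorted := make([]string, len(referenceChecksums))
	for i, c := range referenceChecksums {
		sorted[i] = strings.ToLower(c)
	}
	sort.Strings(sorted) // ascending lexicographic order
	sum := md5.Sum([]byte(strings.Join(sorted, "")))
	return hex.EncodeToString(sum[:])
}

func main() {
	fmt.Println(referenceSetMd5([]string{
		"1b22b98cdeb4a9304cb5d48026a85128",
		"a0d9851da00400dec1098a9255ac712e",
	}))
}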
func (m *ReferenceSet) Reset() { *m = ReferenceSet{} }
|
||||
func (m *ReferenceSet) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReferenceSet) ProtoMessage() {}
|
||||
func (*ReferenceSet) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{1} }
|
||||
func (m *ReferenceSet) Reset() { *m = ReferenceSet{} }
|
||||
func (m *ReferenceSet) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReferenceSet) ProtoMessage() {}
|
||||
func (*ReferenceSet) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_references_4f3354422baff2a6, []int{1}
|
||||
}
|
||||
func (m *ReferenceSet) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ReferenceSet.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ReferenceSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ReferenceSet.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *ReferenceSet) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReferenceSet.Merge(dst, src)
|
||||
}
|
||||
func (m *ReferenceSet) XXX_Size() int {
|
||||
return xxx_messageInfo_ReferenceSet.Size(m)
|
||||
}
|
||||
func (m *ReferenceSet) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReferenceSet.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReferenceSet proto.InternalMessageInfo
|
||||
|
||||
func (m *ReferenceSet) GetId() string {
|
||||
if m != nil {
|
||||
@ -201,28 +251,50 @@ func (m *ReferenceSet) GetSourceAccessions() []string {
|
||||
type SearchReferenceSetsRequest struct {
|
||||
// If present, return reference sets for which the
|
||||
// [md5checksum][google.genomics.v1.ReferenceSet.md5checksum] matches exactly.
|
||||
Md5Checksums []string `protobuf:"bytes,1,rep,name=md5checksums" json:"md5checksums,omitempty"`
|
||||
Md5Checksums []string `protobuf:"bytes,1,rep,name=md5checksums,proto3" json:"md5checksums,omitempty"`
|
||||
// If present, return reference sets for which a prefix of any of
|
||||
// [sourceAccessions][google.genomics.v1.ReferenceSet.source_accessions]
|
||||
// match any of these strings. Accession numbers typically have a main number
|
||||
// and a version, for example `NC_000001.11`.
|
||||
Accessions []string `protobuf:"bytes,2,rep,name=accessions" json:"accessions,omitempty"`
|
||||
Accessions []string `protobuf:"bytes,2,rep,name=accessions,proto3" json:"accessions,omitempty"`
|
||||
// If present, return reference sets for which a substring of their
|
||||
// `assemblyId` matches this string (case insensitive).
|
||||
AssemblyId string `protobuf:"bytes,3,opt,name=assembly_id,json=assemblyId" json:"assembly_id,omitempty"`
|
||||
AssemblyId string `protobuf:"bytes,3,opt,name=assembly_id,json=assemblyId,proto3" json:"assembly_id,omitempty"`
|
||||
// The continuation token, which is used to page through large result sets.
|
||||
// To get the next page of results, set this parameter to the value of
|
||||
// `nextPageToken` from the previous response.
|
||||
PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"`
|
||||
PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
|
||||
// The maximum number of results to return in a single page. If unspecified,
|
||||
// defaults to 1024. The maximum value is 4096.
|
||||
PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize" json:"page_size,omitempty"`
|
||||
PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
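PageToken, PageSize, and NextPageToken above implement cursor-style paging: resend the request with PageToken set to the previous response's NextPageToken until the token comes back empty. The sketch below assumes a ReferenceServiceV1Client with a SearchReferenceSets method; that client is generated elsewhere in this file and is not shown in these hunks, so treat the call as an assumption.

// Sketch: draining all pages of SearchReferenceSets using the paging fields
// documented above.
package main

import (
	"context"
	"fmt"
	"log"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

func listAllReferenceSets(ctx context.Context, client genomics.ReferenceServiceV1Client) ([]*genomics.ReferenceSet, error) {
	var all []*genomics.ReferenceSet
	req := &genomics.SearchReferenceSetsRequest{PageSize: 1024}
	for {
		resp, err := client.SearchReferenceSets(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetReferenceSets()...)
		if resp.GetNextPageToken() == "" {
			return all, nil // an empty token means there are no further pages
		}
		req.PageToken = resp.GetNextPageToken()
	}
}

func main() {
	// Wiring up a real gRPC connection is outside the scope of this sketch.
	var client genomics.ReferenceServiceV1Client
	if client == nil {
		fmt.Println("connect a gRPC client before calling listAllReferenceSets")
		return
	}
	sets, err := listAllReferenceSets(context.Background(), client)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(sets))
}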
func (m *SearchReferenceSetsRequest) Reset() { *m = SearchReferenceSetsRequest{} }
|
||||
func (m *SearchReferenceSetsRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*SearchReferenceSetsRequest) ProtoMessage() {}
|
||||
func (*SearchReferenceSetsRequest) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{2} }
|
||||
func (m *SearchReferenceSetsRequest) Reset() { *m = SearchReferenceSetsRequest{} }
|
||||
func (m *SearchReferenceSetsRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*SearchReferenceSetsRequest) ProtoMessage() {}
|
||||
func (*SearchReferenceSetsRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_references_4f3354422baff2a6, []int{2}
|
||||
}
|
||||
func (m *SearchReferenceSetsRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_SearchReferenceSetsRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *SearchReferenceSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_SearchReferenceSetsRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (dst *SearchReferenceSetsRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_SearchReferenceSetsRequest.Merge(dst, src)
|
||||
}
|
||||
func (m *SearchReferenceSetsRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_SearchReferenceSetsRequest.Size(m)
|
||||
}
|
||||
func (m *SearchReferenceSetsRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_SearchReferenceSetsRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_SearchReferenceSetsRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *SearchReferenceSetsRequest) GetMd5Checksums() []string {
|
||||
if m != nil {
|
||||
@ -261,17 +333,39 @@ func (m *SearchReferenceSetsRequest) GetPageSize() int32 {
|
||||
|
||||
type SearchReferenceSetsResponse struct {
|
||||
// The matching reference sets.
ReferenceSets []*ReferenceSet `protobuf:"bytes,1,rep,name=reference_sets,json=referenceSets" json:"reference_sets,omitempty"`
ReferenceSets []*ReferenceSet `protobuf:"bytes,1,rep,name=reference_sets,json=referenceSets,proto3" json:"reference_sets,omitempty"`
// The continuation token, which is used to page through large result sets.
// Provide this value in a subsequent request to return the next page of
// results. This field will be empty if there aren't any additional results.
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"`
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}

func (m *SearchReferenceSetsResponse) Reset() { *m = SearchReferenceSetsResponse{} }
func (m *SearchReferenceSetsResponse) String() string { return proto.CompactTextString(m) }
func (*SearchReferenceSetsResponse) ProtoMessage() {}
func (*SearchReferenceSetsResponse) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{3} }
func (m *SearchReferenceSetsResponse) Reset() { *m = SearchReferenceSetsResponse{} }
func (m *SearchReferenceSetsResponse) String() string { return proto.CompactTextString(m) }
func (*SearchReferenceSetsResponse) ProtoMessage() {}
func (*SearchReferenceSetsResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_references_4f3354422baff2a6, []int{3}
}
func (m *SearchReferenceSetsResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SearchReferenceSetsResponse.Unmarshal(m, b)
}
func (m *SearchReferenceSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SearchReferenceSetsResponse.Marshal(b, m, deterministic)
}
func (dst *SearchReferenceSetsResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_SearchReferenceSetsResponse.Merge(dst, src)
}
func (m *SearchReferenceSetsResponse) XXX_Size() int {
return xxx_messageInfo_SearchReferenceSetsResponse.Size(m)
}
func (m *SearchReferenceSetsResponse) XXX_DiscardUnknown() {
xxx_messageInfo_SearchReferenceSetsResponse.DiscardUnknown(m)
}

var xxx_messageInfo_SearchReferenceSetsResponse proto.InternalMessageInfo

func (m *SearchReferenceSetsResponse) GetReferenceSets() []*ReferenceSet {
if m != nil {
@ -289,13 +383,35 @@ func (m *SearchReferenceSetsResponse) GetNextPageToken() string {

type GetReferenceSetRequest struct {
// The ID of the reference set.
ReferenceSetId string `protobuf:"bytes,1,opt,name=reference_set_id,json=referenceSetId" json:"reference_set_id,omitempty"`
ReferenceSetId string `protobuf:"bytes,1,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}

func (m *GetReferenceSetRequest) Reset() { *m = GetReferenceSetRequest{} }
func (m *GetReferenceSetRequest) String() string { return proto.CompactTextString(m) }
func (*GetReferenceSetRequest) ProtoMessage() {}
func (*GetReferenceSetRequest) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{4} }
func (m *GetReferenceSetRequest) Reset() { *m = GetReferenceSetRequest{} }
func (m *GetReferenceSetRequest) String() string { return proto.CompactTextString(m) }
func (*GetReferenceSetRequest) ProtoMessage() {}
func (*GetReferenceSetRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_references_4f3354422baff2a6, []int{4}
}
func (m *GetReferenceSetRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_GetReferenceSetRequest.Unmarshal(m, b)
}
func (m *GetReferenceSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_GetReferenceSetRequest.Marshal(b, m, deterministic)
}
func (dst *GetReferenceSetRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_GetReferenceSetRequest.Merge(dst, src)
}
func (m *GetReferenceSetRequest) XXX_Size() int {
return xxx_messageInfo_GetReferenceSetRequest.Size(m)
}
func (m *GetReferenceSetRequest) XXX_DiscardUnknown() {
xxx_messageInfo_GetReferenceSetRequest.DiscardUnknown(m)
}

var xxx_messageInfo_GetReferenceSetRequest proto.InternalMessageInfo

func (m *GetReferenceSetRequest) GetReferenceSetId() string {
if m != nil {
@ -307,27 +423,49 @@ func (m *GetReferenceSetRequest) GetReferenceSetId() string {
type SearchReferencesRequest struct {
// If present, return references for which the
// [md5checksum][google.genomics.v1.Reference.md5checksum] matches exactly.
Md5Checksums []string `protobuf:"bytes,1,rep,name=md5checksums" json:"md5checksums,omitempty"`
Md5Checksums []string `protobuf:"bytes,1,rep,name=md5checksums,proto3" json:"md5checksums,omitempty"`
// If present, return references for which a prefix of any of
// [sourceAccessions][google.genomics.v1.Reference.source_accessions] match
// any of these strings. Accession numbers typically have a main number and a
// version, for example `GCF_000001405.26`.
Accessions []string `protobuf:"bytes,2,rep,name=accessions" json:"accessions,omitempty"`
Accessions []string `protobuf:"bytes,2,rep,name=accessions,proto3" json:"accessions,omitempty"`
// If present, return only references which belong to this reference set.
ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId" json:"reference_set_id,omitempty"`
ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"`
// The continuation token, which is used to page through large result sets.
// To get the next page of results, set this parameter to the value of
// `nextPageToken` from the previous response.
PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"`
PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
// The maximum number of results to return in a single page. If unspecified,
// defaults to 1024. The maximum value is 4096.
PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize" json:"page_size,omitempty"`
PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
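// Editorial sketch (not part of the generated code): restricting a reference
// search to a single reference set via the referenceSetId field above. The
// set ID, the accession prefix, and the client value `c` are placeholders; a
// real caller would obtain the ID from a prior SearchReferenceSets call.
func exampleSearchReferences(ctx context.Context, c ReferenceServiceV1Client, referenceSetID string) ([]*Reference, error) {
	resp, err := c.SearchReferences(ctx, &SearchReferencesRequest{
		ReferenceSetId: referenceSetID,
		Accessions:     []string{"GCF_000001405"}, // hypothetical accession prefix
		PageSize:       512,
	})
	if err != nil {
		return nil, err
	}
	// Only the first page is returned here; follow NextPageToken as in the
	// SearchReferenceSets sketch to collect the remaining pages.
	return resp.GetReferences(), nil
}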

func (m *SearchReferencesRequest) Reset() { *m = SearchReferencesRequest{} }
func (m *SearchReferencesRequest) String() string { return proto.CompactTextString(m) }
func (*SearchReferencesRequest) ProtoMessage() {}
func (*SearchReferencesRequest) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{5} }
func (m *SearchReferencesRequest) Reset() { *m = SearchReferencesRequest{} }
func (m *SearchReferencesRequest) String() string { return proto.CompactTextString(m) }
func (*SearchReferencesRequest) ProtoMessage() {}
func (*SearchReferencesRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_references_4f3354422baff2a6, []int{5}
}
func (m *SearchReferencesRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SearchReferencesRequest.Unmarshal(m, b)
}
func (m *SearchReferencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SearchReferencesRequest.Marshal(b, m, deterministic)
}
func (dst *SearchReferencesRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_SearchReferencesRequest.Merge(dst, src)
}
func (m *SearchReferencesRequest) XXX_Size() int {
return xxx_messageInfo_SearchReferencesRequest.Size(m)
}
func (m *SearchReferencesRequest) XXX_DiscardUnknown() {
xxx_messageInfo_SearchReferencesRequest.DiscardUnknown(m)
}

var xxx_messageInfo_SearchReferencesRequest proto.InternalMessageInfo

func (m *SearchReferencesRequest) GetMd5Checksums() []string {
if m != nil {
@ -366,17 +504,39 @@ func (m *SearchReferencesRequest) GetPageSize() int32 {

type SearchReferencesResponse struct {
// The matching references.
References []*Reference `protobuf:"bytes,1,rep,name=references" json:"references,omitempty"`
References []*Reference `protobuf:"bytes,1,rep,name=references,proto3" json:"references,omitempty"`
// The continuation token, which is used to page through large result sets.
// Provide this value in a subsequent request to return the next page of
// results. This field will be empty if there aren't any additional results.
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"`
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}

func (m *SearchReferencesResponse) Reset() { *m = SearchReferencesResponse{} }
func (m *SearchReferencesResponse) String() string { return proto.CompactTextString(m) }
func (*SearchReferencesResponse) ProtoMessage() {}
func (*SearchReferencesResponse) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{6} }
func (m *SearchReferencesResponse) Reset() { *m = SearchReferencesResponse{} }
func (m *SearchReferencesResponse) String() string { return proto.CompactTextString(m) }
func (*SearchReferencesResponse) ProtoMessage() {}
func (*SearchReferencesResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_references_4f3354422baff2a6, []int{6}
}
func (m *SearchReferencesResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SearchReferencesResponse.Unmarshal(m, b)
}
func (m *SearchReferencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SearchReferencesResponse.Marshal(b, m, deterministic)
}
func (dst *SearchReferencesResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_SearchReferencesResponse.Merge(dst, src)
}
func (m *SearchReferencesResponse) XXX_Size() int {
return xxx_messageInfo_SearchReferencesResponse.Size(m)
}
func (m *SearchReferencesResponse) XXX_DiscardUnknown() {
xxx_messageInfo_SearchReferencesResponse.DiscardUnknown(m)
}

var xxx_messageInfo_SearchReferencesResponse proto.InternalMessageInfo

func (m *SearchReferencesResponse) GetReferences() []*Reference {
if m != nil {
@ -394,13 +554,35 @@ func (m *SearchReferencesResponse) GetNextPageToken() string {

type GetReferenceRequest struct {
// The ID of the reference.
ReferenceId string `protobuf:"bytes,1,opt,name=reference_id,json=referenceId" json:"reference_id,omitempty"`
ReferenceId string `protobuf:"bytes,1,opt,name=reference_id,json=referenceId,proto3" json:"reference_id,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}

func (m *GetReferenceRequest) Reset() { *m = GetReferenceRequest{} }
func (m *GetReferenceRequest) String() string { return proto.CompactTextString(m) }
func (*GetReferenceRequest) ProtoMessage() {}
func (*GetReferenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{7} }
func (m *GetReferenceRequest) Reset() { *m = GetReferenceRequest{} }
func (m *GetReferenceRequest) String() string { return proto.CompactTextString(m) }
func (*GetReferenceRequest) ProtoMessage() {}
func (*GetReferenceRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_references_4f3354422baff2a6, []int{7}
}
func (m *GetReferenceRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_GetReferenceRequest.Unmarshal(m, b)
}
func (m *GetReferenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_GetReferenceRequest.Marshal(b, m, deterministic)
}
func (dst *GetReferenceRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_GetReferenceRequest.Merge(dst, src)
}
func (m *GetReferenceRequest) XXX_Size() int {
return xxx_messageInfo_GetReferenceRequest.Size(m)
}
func (m *GetReferenceRequest) XXX_DiscardUnknown() {
xxx_messageInfo_GetReferenceRequest.DiscardUnknown(m)
}

var xxx_messageInfo_GetReferenceRequest proto.InternalMessageInfo

func (m *GetReferenceRequest) GetReferenceId() string {
if m != nil {
@ -411,26 +593,48 @@ func (m *GetReferenceRequest) GetReferenceId() string {

type ListBasesRequest struct {
// The ID of the reference.
ReferenceId string `protobuf:"bytes,1,opt,name=reference_id,json=referenceId" json:"reference_id,omitempty"`
ReferenceId string `protobuf:"bytes,1,opt,name=reference_id,json=referenceId,proto3" json:"reference_id,omitempty"`
// The start position (0-based) of this query. Defaults to 0.
Start int64 `protobuf:"varint,2,opt,name=start" json:"start,omitempty"`
Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"`
// The end position (0-based, exclusive) of this query. Defaults to the length
// of this reference.
End int64 `protobuf:"varint,3,opt,name=end" json:"end,omitempty"`
End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"`
// The continuation token, which is used to page through large result sets.
// To get the next page of results, set this parameter to the value of
// `nextPageToken` from the previous response.
PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"`
PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
// The maximum number of bases to return in a single page. If unspecified,
// defaults to 200Kbp (kilo base pairs). The maximum value is 10Mbp (mega base
// pairs).
PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize" json:"page_size,omitempty"`
PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
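// Editorial sketch (not part of the generated code): reading the bases of a
// 0-based, end-exclusive window [start, end) from one reference, following
// nextPageToken as described in the comments above. The reference ID and the
// window bounds are placeholders supplied by the caller.
func exampleListBases(ctx context.Context, c ReferenceServiceV1Client, referenceID string, start, end int64) (string, error) {
	req := &ListBasesRequest{
		ReferenceId: referenceID,
		Start:       start, // inclusive, 0-based
		End:         end,   // exclusive
	}
	var sequence string
	for {
		resp, err := c.ListBases(ctx, req)
		if err != nil {
			return "", err
		}
		sequence += resp.GetSequence()
		if resp.GetNextPageToken() == "" {
			return sequence, nil
		}
		req.PageToken = resp.GetNextPageToken()
	}
}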

func (m *ListBasesRequest) Reset() { *m = ListBasesRequest{} }
func (m *ListBasesRequest) String() string { return proto.CompactTextString(m) }
func (*ListBasesRequest) ProtoMessage() {}
func (*ListBasesRequest) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{8} }
func (m *ListBasesRequest) Reset() { *m = ListBasesRequest{} }
func (m *ListBasesRequest) String() string { return proto.CompactTextString(m) }
func (*ListBasesRequest) ProtoMessage() {}
func (*ListBasesRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_references_4f3354422baff2a6, []int{8}
}
func (m *ListBasesRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ListBasesRequest.Unmarshal(m, b)
}
func (m *ListBasesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ListBasesRequest.Marshal(b, m, deterministic)
}
func (dst *ListBasesRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_ListBasesRequest.Merge(dst, src)
}
func (m *ListBasesRequest) XXX_Size() int {
return xxx_messageInfo_ListBasesRequest.Size(m)
}
func (m *ListBasesRequest) XXX_DiscardUnknown() {
xxx_messageInfo_ListBasesRequest.DiscardUnknown(m)
}

var xxx_messageInfo_ListBasesRequest proto.InternalMessageInfo

func (m *ListBasesRequest) GetReferenceId() string {
if m != nil {
@ -471,19 +675,41 @@ type ListBasesResponse struct {
// The offset position (0-based) of the given `sequence` from the
// start of this `Reference`. This value will differ for each page
// in a paginated request.
Offset int64 `protobuf:"varint,1,opt,name=offset" json:"offset,omitempty"`
Offset int64 `protobuf:"varint,1,opt,name=offset,proto3" json:"offset,omitempty"`
// A substring of the bases that make up this reference.
Sequence string `protobuf:"bytes,2,opt,name=sequence" json:"sequence,omitempty"`
Sequence string `protobuf:"bytes,2,opt,name=sequence,proto3" json:"sequence,omitempty"`
// The continuation token, which is used to page through large result sets.
// Provide this value in a subsequent request to return the next page of
// results. This field will be empty if there aren't any additional results.
NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"`
NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}

func (m *ListBasesResponse) Reset() { *m = ListBasesResponse{} }
func (m *ListBasesResponse) String() string { return proto.CompactTextString(m) }
func (*ListBasesResponse) ProtoMessage() {}
func (*ListBasesResponse) Descriptor() ([]byte, []int) { return fileDescriptor10, []int{9} }
func (m *ListBasesResponse) Reset() { *m = ListBasesResponse{} }
func (m *ListBasesResponse) String() string { return proto.CompactTextString(m) }
func (*ListBasesResponse) ProtoMessage() {}
func (*ListBasesResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_references_4f3354422baff2a6, []int{9}
}
func (m *ListBasesResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ListBasesResponse.Unmarshal(m, b)
}
func (m *ListBasesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ListBasesResponse.Marshal(b, m, deterministic)
}
func (dst *ListBasesResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ListBasesResponse.Merge(dst, src)
}
func (m *ListBasesResponse) XXX_Size() int {
return xxx_messageInfo_ListBasesResponse.Size(m)
}
func (m *ListBasesResponse) XXX_DiscardUnknown() {
xxx_messageInfo_ListBasesResponse.DiscardUnknown(m)
}

var xxx_messageInfo_ListBasesResponse proto.InternalMessageInfo

func (m *ListBasesResponse) GetOffset() int64 {
if m != nil {
@ -527,8 +753,9 @@ var _ grpc.ClientConn
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// Client API for ReferenceServiceV1 service

// ReferenceServiceV1Client is the client API for ReferenceServiceV1 service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type ReferenceServiceV1Client interface {
// Searches for reference sets which match the given criteria.
//
@ -587,7 +814,7 @@ func NewReferenceServiceV1Client(cc *grpc.ClientConn) ReferenceServiceV1Client {

func (c *referenceServiceV1Client) SearchReferenceSets(ctx context.Context, in *SearchReferenceSetsRequest, opts ...grpc.CallOption) (*SearchReferenceSetsResponse, error) {
out := new(SearchReferenceSetsResponse)
err := grpc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/SearchReferenceSets", in, out, c.cc, opts...)
err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/SearchReferenceSets", in, out, opts...)
if err != nil {
return nil, err
}
@ -596,7 +823,7 @@ func (c *referenceServiceV1Client) SearchReferenceSets(ctx context.Context, in *

func (c *referenceServiceV1Client) GetReferenceSet(ctx context.Context, in *GetReferenceSetRequest, opts ...grpc.CallOption) (*ReferenceSet, error) {
out := new(ReferenceSet)
err := grpc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/GetReferenceSet", in, out, c.cc, opts...)
err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/GetReferenceSet", in, out, opts...)
if err != nil {
return nil, err
}
@ -605,7 +832,7 @@ func (c *referenceServiceV1Client) GetReferenceSet(ctx context.Context, in *GetR

func (c *referenceServiceV1Client) SearchReferences(ctx context.Context, in *SearchReferencesRequest, opts ...grpc.CallOption) (*SearchReferencesResponse, error) {
out := new(SearchReferencesResponse)
err := grpc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/SearchReferences", in, out, c.cc, opts...)
err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/SearchReferences", in, out, opts...)
if err != nil {
return nil, err
}
@ -614,7 +841,7 @@ func (c *referenceServiceV1Client) SearchReferences(ctx context.Context, in *Sea

func (c *referenceServiceV1Client) GetReference(ctx context.Context, in *GetReferenceRequest, opts ...grpc.CallOption) (*Reference, error) {
out := new(Reference)
err := grpc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/GetReference", in, out, c.cc, opts...)
err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/GetReference", in, out, opts...)
if err != nil {
return nil, err
}
@ -623,15 +850,14 @@ func (c *referenceServiceV1Client) GetReference(ctx context.Context, in *GetRefe

func (c *referenceServiceV1Client) ListBases(ctx context.Context, in *ListBasesRequest, opts ...grpc.CallOption) (*ListBasesResponse, error) {
out := new(ListBasesResponse)
err := grpc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/ListBases", in, out, c.cc, opts...)
err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/ListBases", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
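// Editorial sketch (not part of the generated code): wiring the generated
// client to a connection and fetching one reference set by ID. The endpoint
// and the use of grpc.WithInsecure are placeholders for illustration; the
// hosted service requires TLS and OAuth credentials, which are out of scope
// here.
func exampleGetReferenceSet(ctx context.Context, referenceSetID string) (*ReferenceSet, error) {
	conn, err := grpc.Dial("genomics.googleapis.com:443", grpc.WithInsecure())
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	c := NewReferenceServiceV1Client(conn)
	return c.GetReferenceSet(ctx, &GetReferenceSetRequest{ReferenceSetId: referenceSetID})
}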

// Server API for ReferenceServiceV1 service

// ReferenceServiceV1Server is the server API for ReferenceServiceV1 service.
type ReferenceServiceV1Server interface {
// Searches for reference sets which match the given criteria.
//
@ -803,9 +1029,11 @@ var _ReferenceServiceV1_serviceDesc = grpc.ServiceDesc{
Metadata: "google/genomics/v1/references.proto",
}

func init() { proto.RegisterFile("google/genomics/v1/references.proto", fileDescriptor10) }
func init() {
proto.RegisterFile("google/genomics/v1/references.proto", fileDescriptor_references_4f3354422baff2a6)
}

var fileDescriptor10 = []byte{
var fileDescriptor_references_4f3354422baff2a6 = []byte{
// 851 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x41, 0x6f, 0x1b, 0x45,
0x14, 0xd6, 0x78, 0x63, 0x37, 0x7e, 0x76, 0x12, 0xf7, 0x15, 0xc2, 0xca, 0x25, 0xd4, 0x6c, 0x9a,
1262
vendor/google.golang.org/genproto/googleapis/genomics/v1/variants.pb.go
generated
vendored
1262
vendor/google.golang.org/genproto/googleapis/genomics/v1/variants.pb.go
generated
vendored
File diff suppressed because it is too large
Load Diff
954
vendor/google.golang.org/genproto/googleapis/genomics/v1alpha2/pipelines.pb.go
generated
vendored
954
vendor/google.golang.org/genproto/googleapis/genomics/v1alpha2/pipelines.pb.go
generated
vendored
File diff suppressed because it is too large
Load Diff