Synchronize new proto changes.

This commit is contained in:
Google APIs 2017-11-08 13:06:31 -08:00
parent 761650c613
commit 112ffe43cc
8 changed files with 1399 additions and 5 deletions

View File

@ -0,0 +1,34 @@
# Artman build configuration for the BigQuery Data Transfer API (v1):
# declares the proto sources, codegen inputs, and the client-library
# artifacts to generate.
common:
  # API identifier and version used to name generated artifacts.
  api_name: bigquerydatatransfer
  api_version: v1
  organization_name: google-cloud
  # Proto dependencies that must be on the import path when compiling.
  proto_deps:
  - name: google-common-protos
  # Directories (relative to this config) containing the .proto sources.
  src_proto_paths:
  - v1
  # Service configuration and GAPIC configuration consumed by the generators.
  service_yaml: datatransfer.yaml
  gapic_yaml: v1/bigquerydatatransfer_gapic.yaml
# One entry per generated output: the GAPIC config itself plus a GAPIC
# client library for each supported language.
artifacts:
- name: gapic_config
  type: GAPIC_CONFIG
- name: java_gapic
  type: GAPIC
  language: JAVA
- name: python_gapic
  type: GAPIC
  language: PYTHON
- name: nodejs_gapic
  type: GAPIC
  language: NODEJS
- name: php_gapic
  type: GAPIC
  language: PHP
- name: go_gapic
  type: GAPIC
  language: GO
- name: ruby_gapic
  type: GAPIC
  language: RUBY
- name: csharp_gapic
  type: GAPIC
  language: CSHARP

View File

@ -0,0 +1,31 @@
# google.api.Service configuration for the BigQuery Data Transfer API.
type: google.api.Service
config_version: 3

# DNS name under which the service is exposed.
name: bigquerydatatransfer.googleapis.com
title: BigQuery Data Transfer API

# API surfaces served by this endpoint.
apis:
- name: google.cloud.bigquery.datatransfer.v1.DataTransferService

documentation:
  summary: |-
    Transfers data from partner SaaS applications to Google BigQuery on a
    scheduled, managed basis.

# OAuth scope requirements. The '*' selector is the default for all methods;
# the second rule narrows the mutating methods to read-write scopes only
# (the read-only cloud-platform scope is not accepted for them).
authentication:
  rules:
  - selector: '*'
    oauth:
      canonical_scopes: |-
        https://www.googleapis.com/auth/bigquery,
        https://www.googleapis.com/auth/cloud-platform,
        https://www.googleapis.com/auth/cloud-platform.read-only
  - selector: |-
      google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig,
      google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig,
      google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun,
      google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns,
      google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig
    oauth:
      canonical_scopes: |-
        https://www.googleapis.com/auth/bigquery,
        https://www.googleapis.com/auth/cloud-platform

View File

@ -0,0 +1,411 @@
type: com.google.api.codegen.ConfigProto
config_schema_version: 1.0.0
# The settings of generated code in a specific language.
language_settings:
  java:
    package_name: com.google.cloud.bigquery.datatransfer.v1
  python:
    package_name: google.cloud.bigquery.datatransfer_v1.gapic
  go:
    package_name: cloud.google.com/go/cloud/bigquery/datatransfer/apiv1
  csharp:
    package_name: Google.Cloud.Bigquery.Datatransfer.V1
  ruby:
    package_name: Google::Cloud::Bigquery::Datatransfer::V1
  php:
    package_name: Google\Cloud\Bigquery\Datatransfer\V1
  nodejs:
    package_name: datatransfer.v1
# The configuration for the license header to put on generated files.
license_header:
  # The file containing the copyright line(s).
  copyright_file: copyright-google.txt
  # The file containing the raw license header without any copyright line(s).
  license_file: license-header-apache-2.0.txt
# Groups of resource collections that can substitute for one another in a
# single request field (here: names with and without a location component).
collection_oneofs:
- oneof_name: parent_oneof
  collection_names:
  - location
  - project
- oneof_name: data_source_oneof
  collection_names:
  - location_data_source
  - data_source
- oneof_name: transfer_config_oneof
  collection_names:
  - location_transfer_config
  - transfer_config
- oneof_name: run_oneof
  collection_names:
  - location_run
  - run
# A list of API interface configurations.
interfaces:
# The fully qualified name of the API interface.
- name: google.cloud.bigquery.datatransfer.v1.DataTransferService
  # A list of resource collection configurations.
  # Consists of a name_pattern and an entity_name.
  # The name_pattern is a pattern to describe the names of the resources of this
  # collection, using the platform's conventions for URI patterns. A generator
  # may use this to generate methods to compose and decompose such names. The
  # pattern should use named placeholders as in `shelves/{shelf}/books/{book}`;
  # those will be taken as hints for the parameter names of the generated
  # methods. If empty, no name methods are generated.
  # The entity_name is the name to be used as a basis for generated methods and
  # classes.
  collections:
  - name_pattern: projects/{project}
    entity_name: project
  - name_pattern: projects/{project}/locations/{location}
    entity_name: location
  - name_pattern: projects/{project}/locations/{location}/dataSources/{data_source}
    entity_name: location_data_source
  - name_pattern: projects/{project}/locations/{location}/transferConfigs/{transfer_config}
    entity_name: location_transfer_config
  - name_pattern: projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}
    entity_name: location_run
  - name_pattern: projects/{project}/dataSources/{data_source}
    entity_name: data_source
  - name_pattern: projects/{project}/transferConfigs/{transfer_config}
    entity_name: transfer_config
  - name_pattern: projects/{project}/transferConfigs/{transfer_config}/runs/{run}
    entity_name: run
  # Definition for retryable codes.
  retry_codes_def:
  - name: idempotent
    retry_codes:
    - UNAVAILABLE
    - DEADLINE_EXCEEDED
  - name: non_idempotent
    retry_codes: []
  # Definition for retry/backoff parameters.
  retry_params_def:
  - name: default
    initial_retry_delay_millis: 100
    retry_delay_multiplier: 1.3
    max_retry_delay_millis: 60000
    initial_rpc_timeout_millis: 20000
    rpc_timeout_multiplier: 1
    max_rpc_timeout_millis: 20000
    total_timeout_millis: 600000
  # A list of method configurations.
  # Common properties:
  #   name - The simple name of the method.
  #   flattening - Specifies the configuration for parameter flattening.
  #     Describes the parameter groups for which a generator should produce
  #     method overloads which allow a client to directly pass request message
  #     fields as method parameters. This information may or may not be used,
  #     depending on the target language.
  #     Consists of groups, which each represent a list of parameters to be
  #     flattened. Each parameter listed must be a field of the request
  #     message.
  #   required_fields - Fields that are always required for a request to be
  #     valid.
  #   request_object_method - Turns on or off the generation of a method whose
  #     sole parameter is a request object. Not all languages will generate
  #     this method.
  #   resource_name_treatment - An enum that specifies how to treat the
  #     resource name formats defined in the field_name_patterns
  #     and response_field_name_patterns fields.
  #     UNSET: default value
  #     NONE: the collection configs will not be used by the generated code.
  #     VALIDATE: string fields will be validated by the client against the
  #       specified resource name formats.
  #     STATIC_TYPES: the client will use generated types for resource names.
  #   page_streaming - Specifies the configuration for paging.
  #     Describes information for generating a method which transforms a
  #     paging list RPC into a stream of resources.
  #     Consists of a request and a response.
  #     The request specifies request information of the list method. It
  #     defines which fields match the paging pattern in the request. The
  #     request consists of a page_size_field and a token_field. The
  #     page_size_field is the name of the optional field specifying the
  #     maximum number of elements to be returned in the response. The
  #     token_field is the name of the field in the request containing the
  #     page token.
  #     The response specifies response information of the list method. It
  #     defines which fields match the paging pattern in the response. The
  #     response consists of a token_field and a resources_field. The
  #     token_field is the name of the field in the response containing the
  #     next page token. The resources_field is the name of the field in the
  #     response containing the list of resources belonging to the page.
  #   retry_codes_name - Specifies the configuration for retryable codes. The
  #     name must be defined in interfaces.retry_codes_def.
  #   retry_params_name - Specifies the configuration for retry/backoff
  #     parameters. The name must be defined in interfaces.retry_params_def.
  #   field_name_patterns - Maps the field name of the request type to
  #     entity_name of interfaces.collections.
  #     Specifies the string pattern that the field must follow.
  #   timeout_millis - Specifies the default timeout for a non-retrying call. If
  #     the call is retrying, refer to retry_params_name instead.
  methods:
  - name: GetDataSource
    flattening:
      groups:
      - parameters:
        - name
    required_fields:
    - name
    request_object_method: false
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      name: data_source_oneof
    timeout_millis: 20000
    resource_name_treatment: STATIC_TYPES
  - name: ListDataSources
    flattening:
      groups:
      - parameters:
        - parent
    required_fields:
    - parent
    request_object_method: true
    page_streaming:
      request:
        page_size_field: page_size
        token_field: page_token
      response:
        token_field: next_page_token
        resources_field: data_sources
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      parent: parent_oneof
    timeout_millis: 20000
    resource_name_treatment: STATIC_TYPES
  - name: CreateTransferConfig
    flattening:
      groups:
      - parameters:
        - parent
        - transfer_config
        - authorization_code
    required_fields:
    - parent
    - transfer_config
    request_object_method: true
    retry_codes_name: non_idempotent
    retry_params_name: default
    field_name_patterns:
      parent: parent_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: UpdateTransferConfig
    flattening:
      groups:
      - parameters:
        - transfer_config
        - authorization_code
        - update_mask
    required_fields:
    - transfer_config
    - update_mask
    request_object_method: true
    retry_codes_name: non_idempotent
    retry_params_name: default
    field_name_patterns:
      transfer_config.name: transfer_config_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: DeleteTransferConfig
    flattening:
      groups:
      - parameters:
        - name
    required_fields:
    - name
    request_object_method: false
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      name: transfer_config_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: GetTransferConfig
    flattening:
      groups:
      - parameters:
        - name
    required_fields:
    - name
    request_object_method: false
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      name: transfer_config_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: ListTransferConfigs
    flattening:
      groups:
      - parameters:
        - parent
        - data_source_ids
    required_fields:
    - parent
    request_object_method: true
    page_streaming:
      request:
        page_size_field: page_size
        token_field: page_token
      response:
        token_field: next_page_token
        resources_field: transfer_configs
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      parent: parent_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: ScheduleTransferRuns
    flattening:
      groups:
      - parameters:
        - parent
        - start_time
        - end_time
    required_fields:
    - parent
    - start_time
    - end_time
    request_object_method: true
    retry_codes_name: non_idempotent
    retry_params_name: default
    field_name_patterns:
      parent: transfer_config_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: GetTransferRun
    flattening:
      groups:
      - parameters:
        - name
    required_fields:
    - name
    request_object_method: false
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      name: run_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: DeleteTransferRun
    flattening:
      groups:
      - parameters:
        - name
    required_fields:
    - name
    request_object_method: false
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      name: run_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: ListTransferRuns
    flattening:
      groups:
      - parameters:
        - parent
        - states
        - run_attempt
    required_fields:
    - parent
    request_object_method: true
    page_streaming:
      request:
        page_size_field: page_size
        token_field: page_token
      response:
        token_field: next_page_token
        resources_field: transfer_runs
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      parent: transfer_config_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: ListTransferLogs
    flattening:
      groups:
      - parameters:
        - parent
        - message_types
    required_fields:
    - parent
    request_object_method: true
    page_streaming:
      request:
        page_size_field: page_size
        token_field: page_token
      response:
        token_field: next_page_token
        resources_field: transfer_messages
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      parent: run_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
  - name: CheckValidCreds
    flattening:
      groups:
      - parameters:
        - name
    required_fields:
    - name
    request_object_method: false
    retry_codes_name: idempotent
    retry_params_name: default
    field_name_patterns:
      name: data_source_oneof
    timeout_millis: 30000
    resource_name_treatment: STATIC_TYPES
# Maps request/response message fields to the entity_name of the resource
# collections (or collection oneofs) whose name format they carry.
resource_name_generation:
- message_name: GetDataSourceRequest
  field_entity_map:
    name: data_source_oneof
- message_name: ListDataSourcesRequest
  field_entity_map:
    parent: parent_oneof
- message_name: CreateTransferConfigRequest
  field_entity_map:
    parent: parent_oneof
- message_name: GetTransferConfigRequest
  field_entity_map:
    name: transfer_config_oneof
- message_name: DeleteTransferConfigRequest
  field_entity_map:
    name: transfer_config_oneof
- message_name: GetTransferRunRequest
  field_entity_map:
    name: run_oneof
- message_name: DeleteTransferRunRequest
  field_entity_map:
    name: run_oneof
- message_name: ListTransferConfigsRequest
  field_entity_map:
    parent: parent_oneof
- message_name: ListTransferRunsRequest
  field_entity_map:
    parent: transfer_config_oneof
- message_name: ListTransferLogsRequest
  field_entity_map:
    parent: run_oneof
- message_name: CheckValidCredsRequest
  field_entity_map:
    name: data_source_oneof
- message_name: ScheduleTransferRunsRequest
  field_entity_map:
    parent: transfer_config_oneof
- message_name: DataSource
  field_entity_map:
    name: data_source_oneof
- message_name: TransferConfig
  field_entity_map:
    name: transfer_config_oneof
- message_name: TransferRun
  field_entity_map:
    name: run_oneof

View File

@ -0,0 +1,623 @@
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.datatransfer.v1;
import "google/api/annotations.proto";
import "google/cloud/bigquery/datatransfer/v1/transfer.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/field_mask.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
option java_multiple_files = true;
option java_outer_classname = "DataTransferProto";
option java_package = "com.google.cloud.bigquery.datatransfer.v1";
// The Google BigQuery Data Transfer Service API enables BigQuery users to
// configure the transfer of their data from other Google Products into
// BigQuery. This service contains methods that are end user exposed. It backs
// the frontend.
service DataTransferService {
  // Retrieves a supported data source and returns its settings,
  // which can be used for UI rendering.
  rpc GetDataSource(GetDataSourceRequest) returns (DataSource) {
    option (google.api.http) = {
      get: "/v1/{name=projects/*/locations/*/dataSources/*}"
    };
  }

  // Lists supported data sources and returns their settings,
  // which can be used for UI rendering.
  rpc ListDataSources(ListDataSourcesRequest)
      returns (ListDataSourcesResponse) {
    option (google.api.http) = {
      get: "/v1/{parent=projects/*/locations/*}/dataSources"
    };
  }

  // Creates a new data transfer configuration.
  rpc CreateTransferConfig(CreateTransferConfigRequest)
      returns (TransferConfig) {
    option (google.api.http) = {
      post: "/v1/{parent=projects/*/locations/*}/transferConfigs"
      body: "transfer_config"
    };
  }

  // Updates a data transfer configuration.
  // All fields must be set, even if they are not updated.
  rpc UpdateTransferConfig(UpdateTransferConfigRequest)
      returns (TransferConfig) {
    option (google.api.http) = {
      patch:
          "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}"
      body: "transfer_config"
    };
  }

  // Deletes a data transfer configuration,
  // including any associated transfer runs and logs.
  rpc DeleteTransferConfig(DeleteTransferConfigRequest)
      returns (google.protobuf.Empty) {
    option (google.api.http) = {
      delete: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
    };
  }

  // Returns information about a data transfer config.
  rpc GetTransferConfig(GetTransferConfigRequest) returns (TransferConfig) {
    option (google.api.http) = {
      get: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
    };
  }

  // Returns information about all data transfers in the project.
  rpc ListTransferConfigs(ListTransferConfigsRequest)
      returns (ListTransferConfigsResponse) {
    option (google.api.http) = {
      get: "/v1/{parent=projects/*/locations/*}/transferConfigs"
    };
  }

  // Creates transfer runs for a time range [range_start_time, range_end_time].
  // For each date - or whatever granularity the data source supports - in the
  // range, one transfer run is created.
  // Note that runs are created per UTC time in the time range.
  rpc ScheduleTransferRuns(ScheduleTransferRunsRequest)
      returns (ScheduleTransferRunsResponse) {
    option (google.api.http) = {
      post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns"
      body: "*"
    };
  }

  // Returns information about the particular transfer run.
  rpc GetTransferRun(GetTransferRunRequest) returns (TransferRun) {
    option (google.api.http) = {
      get: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
    };
  }

  // Deletes the specified transfer run.
  rpc DeleteTransferRun(DeleteTransferRunRequest)
      returns (google.protobuf.Empty) {
    option (google.api.http) = {
      delete: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
    };
  }

  // Returns information about running and completed jobs.
  rpc ListTransferRuns(ListTransferRunsRequest)
      returns (ListTransferRunsResponse) {
    option (google.api.http) = {
      get: "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs"
    };
  }

  // Returns user facing log messages for the data transfer run.
  rpc ListTransferLogs(ListTransferLogsRequest)
      returns (ListTransferLogsResponse) {
    option (google.api.http) = {
      get: "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/"
           "transferLogs"
    };
  }

  // Returns true if valid credentials exist for the given data source and
  // requesting user.
  // Some data sources don't support service accounts, so we need to talk to
  // them on behalf of the end user. This API just checks whether we have an
  // OAuth token for the particular user, which is a prerequisite before the
  // user can create a transfer config.
  rpc CheckValidCreds(CheckValidCredsRequest)
      returns (CheckValidCredsResponse) {
    option (google.api.http) = {
      post: "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds"
      body: "*"
    };
  }
}
// Represents a data source parameter with validation rules, so that
// parameters can be rendered in the UI. These parameters are given to us by
// supported data sources, and include all needed information for rendering
// and validation.
// Thus, whoever uses this API can decide to generate either a generic UI,
// or custom data source specific forms.
message DataSourceParameter {
  // Parameter type.
  enum Type {
    // Type unspecified.
    TYPE_UNSPECIFIED = 0;

    // String parameter.
    STRING = 1;

    // Integer parameter (64-bits).
    // Will be serialized to json as string.
    INTEGER = 2;

    // Double precision floating point parameter.
    DOUBLE = 3;

    // Boolean parameter.
    BOOLEAN = 4;

    // Record parameter.
    RECORD = 5;

    // Page ID for a Google+ Page.
    PLUS_PAGE = 6;
  }

  // Parameter identifier.
  string param_id = 1;

  // Parameter display name in the user interface.
  string display_name = 2;

  // Parameter description.
  string description = 3;

  // Parameter type.
  Type type = 4;

  // Is parameter required.
  bool required = 5;

  // Can parameter have multiple values.
  bool repeated = 6;

  // Regular expression which can be used for parameter validation.
  string validation_regex = 7;

  // All possible values for the parameter.
  repeated string allowed_values = 8;

  // For integer and double values specifies minimum allowed value.
  google.protobuf.DoubleValue min_value = 9;

  // For integer and double values specifies maximum allowed value.
  google.protobuf.DoubleValue max_value = 10;

  // When parameter is a record, describes child fields.
  repeated DataSourceParameter fields = 11;

  // Description of the requirements for this field, in case the user input does
  // not fulfill the regex pattern or min/max values.
  string validation_description = 12;

  // URL to a help document to further explain the naming requirements.
  string validation_help_url = 13;

  // Cannot be changed after initial creation.
  bool immutable = 14;

  // If set to true, schema should be taken from the parent with the same
  // parameter_id. Only applicable when parameter type is RECORD.
  bool recurse = 15;
}
// Represents data source metadata. Metadata is sufficient to
// render UI and request proper OAuth tokens.
message DataSource {
  // The type of authorization needed for this data source.
  enum AuthorizationType {
    // Type unspecified.
    AUTHORIZATION_TYPE_UNSPECIFIED = 0;

    // Use OAuth 2 authorization codes that can be exchanged
    // for a refresh token on the backend.
    AUTHORIZATION_CODE = 1;

    // Return an authorization code for a given Google+ page that can then be
    // exchanged for a refresh token on the backend.
    GOOGLE_PLUS_AUTHORIZATION_CODE = 2;
  }

  // Represents how the data source supports data auto refresh.
  enum DataRefreshType {
    // The data source won't support data auto refresh, which is default value.
    DATA_REFRESH_TYPE_UNSPECIFIED = 0;

    // The data source supports data auto refresh, and runs will be scheduled
    // for the past few days. Does not allow custom values to be set for each
    // transfer config.
    SLIDING_WINDOW = 1;

    // The data source supports data auto refresh, and runs will be scheduled
    // for the past few days. Allows custom values to be set for each transfer
    // config.
    CUSTOM_SLIDING_WINDOW = 2;
  }

  // Data source resource name.
  string name = 1;

  // Data source id.
  string data_source_id = 2;

  // User friendly data source name.
  string display_name = 3;

  // User friendly data source description string.
  string description = 4;

  // Data source client id which should be used to receive refresh token.
  // When not supplied, no offline credentials are populated for data transfer.
  string client_id = 5;

  // API auth scopes for which refresh token needs to be obtained. Only valid
  // when `client_id` is specified. Ignored otherwise. These are scopes needed
  // by a data source to prepare data and ingest them into BigQuery,
  // e.g., https://www.googleapis.com/auth/bigquery
  repeated string scopes = 6;

  // Transfer type. Currently supports only batch transfers,
  // which are transfers that use the BigQuery batch APIs (load or
  // query) to ingest the data.
  TransferType transfer_type = 7;

  // Indicates whether the data source supports multiple transfers
  // to different BigQuery targets.
  bool supports_multiple_transfers = 8;

  // The number of seconds to wait for an update from the data source
  // before BigQuery marks the transfer as failed.
  int32 update_deadline_seconds = 9;

  // Default data transfer schedule.
  // Examples of valid schedules include:
  // `1st,3rd monday of month 15:30`,
  // `every wed,fri of jan,jun 13:15`, and
  // `first sunday of quarter 00:00`.
  string default_schedule = 10;

  // Specifies whether the data source supports a user defined schedule, or
  // operates on the default schedule.
  // When set to `true`, user can override default schedule.
  bool supports_custom_schedule = 11;

  // Data source parameters.
  repeated DataSourceParameter parameters = 12;

  // URL for the help document for this data source.
  string help_url = 13;

  // Indicates the type of authorization.
  AuthorizationType authorization_type = 14;

  // Specifies whether the data source supports automatic data refresh for the
  // past few days, and how it's supported.
  // For some data sources, data might not be complete until a few days later,
  // so it's useful to refresh data automatically.
  DataRefreshType data_refresh_type = 15;

  // Default data refresh window in days.
  // Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`.
  int32 default_data_refresh_window_days = 16;

  // Disables backfilling and manual run scheduling
  // for the data source.
  bool manual_runs_disabled = 17;

  // The minimum interval between two consecutive scheduled runs.
  google.protobuf.Duration minimum_schedule_interval = 18;
}
// A request to get data source info.
message GetDataSourceRequest {
  // The field will contain name of the resource requested, for example:
  // `projects/{project_id}/dataSources/{data_source_id}`
  string name = 1;
}

// Request to list supported data sources and their data transfer settings.
message ListDataSourcesRequest {
  // The BigQuery project id for which data sources should be returned.
  // Must be in the form: `projects/{project_id}`
  string parent = 1;

  // Pagination token, which can be used to request a specific page
  // of `ListDataSourcesRequest` list results. For multiple-page
  // results, `ListDataSourcesResponse` outputs
  // a `next_page` token, which can be used as the
  // `page_token` value to request the next page of list results.
  string page_token = 3;

  // Page size. The default page size is the maximum value of 1000 results.
  int32 page_size = 4;
}

// Returns list of supported data sources and their metadata.
message ListDataSourcesResponse {
  // List of supported data sources and their transfer settings.
  repeated DataSource data_sources = 1;

  // Output only. The next-pagination token. For multiple-page list results,
  // this token can be used as the
  // `ListDataSourcesRequest.page_token`
  // to request the next page of list results.
  string next_page_token = 2;
}
// A request to create a data transfer configuration. If new credentials are
// needed for this transfer configuration, an authorization code must be
// provided. If an authorization code is provided, the transfer configuration
// will be associated with the user id corresponding to the
// authorization code. Otherwise, the transfer configuration will be associated
// with the calling user.
message CreateTransferConfigRequest {
  // The BigQuery project id where the transfer configuration should be created.
  // Must be in the format /projects/{project_id}/locations/{location_id}
  // or
  // /projects/{project_id}/locations/-
  // In case when '-' is specified as location_id, location is inferred from
  // the destination dataset region.
  string parent = 1;

  // Data transfer configuration to create.
  TransferConfig transfer_config = 2;

  // Optional OAuth2 authorization code to use with this transfer configuration.
  // This is required if new credentials are needed, as indicated by
  // `CheckValidCreds`.
  // In order to obtain authorization_code, please make a
  // request to
  // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
  //
  // * client_id should be OAuth client_id of BigQuery DTS API for the given
  //   data source returned by ListDataSources method.
  // * data_source_scopes are the scopes returned by ListDataSources method.
  // * redirect_uri is an optional parameter. If not specified, then
  //   authorization code is posted to the opener of authorization flow window.
  //   Otherwise it will be sent to the redirect uri. A special value of
  //   urn:ietf:wg:oauth:2.0:oob means that authorization code should be
  //   returned in the title bar of the browser, with the page text prompting
  //   the user to copy the code and paste it in the application.
  string authorization_code = 3;
}

// A request to update a transfer configuration. To update the user id of the
// transfer configuration, an authorization code needs to be provided.
message UpdateTransferConfigRequest {
  // Data transfer configuration to update.
  TransferConfig transfer_config = 1;

  // Optional OAuth2 authorization code to use with this transfer configuration.
  // If it is provided, the transfer configuration will be associated with the
  // authorizing user.
  // In order to obtain authorization_code, please make a
  // request to
  // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
  //
  // * client_id should be OAuth client_id of BigQuery DTS API for the given
  //   data source returned by ListDataSources method.
  // * data_source_scopes are the scopes returned by ListDataSources method.
  // * redirect_uri is an optional parameter. If not specified, then
  //   authorization code is posted to the opener of authorization flow window.
  //   Otherwise it will be sent to the redirect uri. A special value of
  //   urn:ietf:wg:oauth:2.0:oob means that authorization code should be
  //   returned in the title bar of the browser, with the page text prompting
  //   the user to copy the code and paste it in the application.
  string authorization_code = 3;

  // Required list of fields to be updated in this request.
  google.protobuf.FieldMask update_mask = 4;
}
// A request to get data transfer information.
message GetTransferConfigRequest {
  // The field will contain name of the resource requested, for example:
  // `projects/{project_id}/transferConfigs/{config_id}`
  string name = 1;
}

// A request to delete data transfer information. All associated transfer runs
// and log messages will be deleted as well.
message DeleteTransferConfigRequest {
  // The field will contain name of the resource requested, for example:
  // `projects/{project_id}/transferConfigs/{config_id}`
  string name = 1;
}

// A request to get data transfer run information.
message GetTransferRunRequest {
  // The field will contain name of the resource requested, for example:
  // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
  string name = 1;
}

// A request to delete data transfer run information.
message DeleteTransferRunRequest {
  // The field will contain name of the resource requested, for example:
  // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
  string name = 1;
}
// A request to list data transfers configured for a BigQuery project.
message ListTransferConfigsRequest {
  // The BigQuery project id for which data sources
  // should be returned: `projects/{project_id}`.
  string parent = 1;

  // When specified, only configurations of requested data sources are returned.
  repeated string data_source_ids = 2;

  // Pagination token, which can be used to request a specific page
  // of `ListTransfersRequest` list results. For multiple-page
  // results, `ListTransfersResponse` outputs
  // a `next_page` token, which can be used as the
  // `page_token` value to request the next page of list results.
  string page_token = 3;

  // Page size. The default page size is the maximum value of 1000 results.
  int32 page_size = 4;
}

// The returned list of pipelines in the project.
message ListTransferConfigsResponse {
  // Output only. The stored pipeline transfer configurations.
  repeated TransferConfig transfer_configs = 1;

  // Output only. The next-pagination token. For multiple-page list results,
  // this token can be used as the
  // `ListTransferConfigsRequest.page_token`
  // to request the next page of list results.
  string next_page_token = 2;
}
// A request to list data transfer runs. UI can use this method to show/filter
// specific data transfer runs. The data source can use this method to request
// all scheduled transfer runs.
message ListTransferRunsRequest {
// Represents which runs should be pulled.
enum RunAttempt {
// All runs should be returned.
RUN_ATTEMPT_UNSPECIFIED = 0;
// Only latest run per day should be returned.
// NOTE(review): value is not prefixed with `RUN_ATTEMPT_`; renaming a
// published enum value would break generated code, so it is kept as-is.
LATEST = 1;
}
// Name of transfer configuration for which transfer runs should be retrieved.
// Format of transfer configuration resource name is:
// `projects/{project_id}/transferConfigs/{config_id}`.
string parent = 1;
// When specified, only transfer runs with requested states are returned.
repeated TransferState states = 2;
// Pagination token, which can be used to request a specific page
// of `ListTransferRunsRequest` list results. For multiple-page
// results, `ListTransferRunsResponse` outputs
// a `next_page_token` value, which can be used as the
// `page_token` value to request the next page of list results.
string page_token = 3;
// Page size. The default page size is the maximum value of 1000 results.
int32 page_size = 4;
// Indicates how run attempts are to be pulled.
RunAttempt run_attempt = 5;
}
// The returned list of transfer runs in the project.
message ListTransferRunsResponse {
// Output only. The stored transfer runs.
repeated TransferRun transfer_runs = 1;
// Output only. The next-page token. For multiple-page list results,
// this token can be used as the
// `ListTransferRunsRequest.page_token`
// to request the next page of list results.
string next_page_token = 2;
}
// A request to get user facing log messages associated with data transfer run.
message ListTransferLogsRequest {
// Transfer run name in the form:
// `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`.
string parent = 1;
// Pagination token, which can be used to request a specific page
// of `ListTransferLogsRequest` list results. For multiple-page
// results, `ListTransferLogsResponse` outputs
// a `next_page_token` value, which can be used as the
// `page_token` value to request the next page of list results.
string page_token = 4;
// Page size. The default page size is the maximum value of 1000 results.
int32 page_size = 5;
// Message types to return. If not populated - INFO, WARNING and ERROR
// messages are returned.
repeated TransferMessage.MessageSeverity message_types = 6;
}
// The returned list of transfer run messages.
message ListTransferLogsResponse {
// Output only. The stored transfer run messages.
repeated TransferMessage transfer_messages = 1;
// Output only. The next-page token. For multiple-page list results,
// this token can be used as the
// `ListTransferLogsRequest.page_token`
// to request the next page of list results.
string next_page_token = 2;
}
// A request to determine whether the user has valid credentials. This method
// is used to limit the number of OAuth popups in the user interface. The
// user id is inferred from the API call context.
// If the data source has the Google+ authorization type, this method
// returns false, as it cannot be determined whether the credentials are
// already valid merely based on the user id.
message CheckValidCredsRequest {
// The data source resource name to check credentials for, in the form:
// `projects/{project_id}/dataSources/{data_source_id}`
string name = 1;
}
// A response indicating whether the credentials exist and are valid.
message CheckValidCredsResponse {
// If set to `true`, the credentials exist and are valid.
// If `false`, the caller should trigger the OAuth flow to obtain credentials.
bool has_valid_creds = 1;
}
// A request to schedule transfer runs for a time range.
message ScheduleTransferRunsRequest {
// Transfer configuration name in the form:
// `projects/{project_id}/transferConfigs/{config_id}`.
string parent = 1;
// Start time of the range of transfer runs (inclusive). For example,
// `"2017-05-25T00:00:00+00:00"`.
google.protobuf.Timestamp start_time = 2;
// End time of the range of transfer runs. For example,
// `"2017-05-30T00:00:00+00:00"`.
google.protobuf.Timestamp end_time = 3;
}
// A response to schedule transfer runs for a time range.
message ScheduleTransferRunsResponse {
// The transfer runs that were scheduled for the requested time range.
repeated TransferRun runs = 1;
}

View File

@ -0,0 +1,218 @@
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.datatransfer.v1;
import "google/api/annotations.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
option java_multiple_files = true;
option java_outer_classname = "TransferProto";
option java_package = "com.google.cloud.bigquery.datatransfer.v1";
option objc_class_prefix = "GCBDT";
// Represents a data transfer configuration. A transfer configuration
// contains all metadata needed to perform a data transfer. For example,
// `destination_dataset_id` specifies where data should be stored.
// When a new transfer configuration is created, the specified
// `destination_dataset_id` is created when needed and shared with the
// appropriate data source service account.
message TransferConfig {
// The resource name of the transfer config.
// Transfer config names have the form
// `projects/{project_id}/transferConfigs/{config_id}`,
// where `config_id` is usually a uuid, even though it is not
// guaranteed or required. The name is ignored when creating a transfer
// config.
string name = 1;
// The BigQuery target dataset id.
string destination_dataset_id = 2;
// User specified display name for the data transfer.
string display_name = 3;
// Data source id. Cannot be changed once data transfer is created.
string data_source_id = 5;
// Data transfer specific parameters.
google.protobuf.Struct params = 9;
// Data transfer schedule.
// If the data source does not support a custom schedule, this should be
// empty. If it is empty, the default value for the data source will be
// used.
// The specified times are in UTC.
// Examples of valid format:
// `1st,3rd monday of month 15:30`,
// `every wed,fri of jan,jun 13:15`, and
// `first sunday of quarter 00:00`.
// See more explanation about the format here:
// https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
// NOTE: the granularity should be at least 8 hours, or less frequent.
string schedule = 7;
// The number of days to look back to automatically refresh the data.
// For example, if `data_refresh_window_days = 10`, then every day
// BigQuery reingests data for [today-10, today-1], rather than ingesting data
// for just [today-1].
// Only valid if the data source supports the feature. Set the value to 0
// to use the default value.
int32 data_refresh_window_days = 12;
// Whether this config is disabled. When set to true, no runs are scheduled
// for a given transfer.
bool disabled = 13;
// Output only. Data transfer modification time. Ignored by server on input.
google.protobuf.Timestamp update_time = 4;
// Output only. Next time when data transfer will run.
google.protobuf.Timestamp next_run_time = 8;
// Output only. State of the most recently updated transfer run.
TransferState state = 10;
// Output only. Unique ID of the user on whose behalf transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
int64 user_id = 11;
// Output only. Region in which BigQuery dataset is located.
string dataset_region = 14;
}
// Represents a data transfer run.
// Next id: 23
message TransferRun {
// The resource name of the transfer run.
// Transfer run names have the form
// `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
// The name is ignored when creating a transfer run.
string name = 1;
// The BigQuery target dataset id.
string destination_dataset_id = 2;
// Minimum time after which a transfer run can be started.
google.protobuf.Timestamp schedule_time = 3;
// Data transfer specific parameters.
google.protobuf.Struct params = 9;
// For batch transfer runs, specifies the date and time that
// data should be ingested.
google.protobuf.Timestamp run_time = 10;
// Output only. Time when transfer run was started.
// Parameter ignored by server for input requests.
google.protobuf.Timestamp start_time = 4;
// Output only. Time when transfer run ended.
// Parameter ignored by server for input requests.
google.protobuf.Timestamp end_time = 5;
// Output only. Last time the data transfer run state was updated.
google.protobuf.Timestamp update_time = 6;
// Output only. Data source id.
string data_source_id = 7;
// Data transfer run state. Ignored for input requests.
TransferState state = 8;
// Output only. Unique ID of the user on whose behalf transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
// May be negative.
int64 user_id = 11;
// Output only. Describes the schedule of this transfer run if it was
// created as part of a regular schedule. For batch transfer runs that are
// scheduled manually, this is empty.
// NOTE: the system might choose to delay the schedule depending on the
// current load, so `schedule_time` doesn't always match this.
string schedule = 12;
}
// Represents a user facing message for a particular data transfer run.
message TransferMessage {
// Represents data transfer user facing message severity.
// NOTE(review): values other than the zero value are not prefixed with
// `MESSAGE_SEVERITY_`; renaming published enum values would break
// generated code, so they are kept as-is.
enum MessageSeverity {
// No severity specified.
MESSAGE_SEVERITY_UNSPECIFIED = 0;
// Informational message.
INFO = 1;
// Warning message.
WARNING = 2;
// Error message.
ERROR = 3;
}
// Time when message was logged.
google.protobuf.Timestamp message_time = 1;
// Message severity.
MessageSeverity severity = 2;
// Message text.
string message_text = 3;
}
// Represents data transfer type.
enum TransferType {
// Invalid or Unknown transfer type placeholder; never a valid
// transfer type for a real configuration.
TRANSFER_TYPE_UNSPECIFIED = 0;
// Batch data transfer.
BATCH = 1;
// Streaming data transfer. Streaming data source currently doesn't
// support multiple transfer configs per project.
STREAMING = 2;
}
// Represents data transfer run state.
enum TransferState {
// State placeholder; never a real run state.
TRANSFER_STATE_UNSPECIFIED = 0;
// Data transfer is inactive.
INACTIVE = 1;
// Data transfer is scheduled and is waiting to be picked up by
// data transfer backend.
PENDING = 2;
// Data transfer is in progress.
RUNNING = 3;
// Data transfer completed successfully.
SUCCEEDED = 4;
// Data transfer failed.
FAILED = 5;
// Data transfer is cancelled.
CANCELLED = 6;
}

View File

@ -30,6 +30,33 @@ documentation:
- selector: google.longrunning.Operations.ListOperations
description: Fetches the list of long running operations.
backend:
rules:
- selector: google.longrunning.Operations.ListOperations
deadline: 300.0
- selector: google.longrunning.Operations.GetOperation
deadline: 300.0
- selector: google.longrunning.Operations.DeleteOperation
deadline: 300.0
- selector: google.longrunning.Operations.CancelOperation
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.InspectContent
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.RedactContent
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.DeidentifyContent
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.CreateInspectOperation
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.AnalyzeDataSourceRisk
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.ListInspectFindings
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.ListInfoTypes
deadline: 300.0
- selector: google.privacy.dlp.v2beta1.DlpService.ListRootCategories
deadline: 300.0
http:
rules:
- selector: google.longrunning.Operations.ListOperations

View File

@ -121,6 +121,9 @@ message InspectConfig {
// Configuration of findings limit given for specified info types.
repeated InfoTypeLimit info_type_limits = 7;
// Custom info types provided by the user.
repeated CustomInfoType custom_info_types = 8;
}
// Additional configuration for inspect long running operations.
@ -258,7 +261,7 @@ message RedactContentRequest {
string replace_with = 2;
}
// Configuration for determing how redaction of images should occur.
// Configuration for determining how redaction of images should occur.
message ImageRedactionConfig {
// Type of information to redact from images.
oneof target {
@ -985,13 +988,13 @@ message BucketingConfig {
repeated Bucket buckets = 1;
}
// Replaces an identifier with an surrogate using FPE with the FFX
// Replaces an identifier with a surrogate using FPE with the FFX
// mode of operation.
// The identifier must be representable by the US-ASCII character set.
// For a given crypto key and context, the same identifier will be
// replaced with the same surrogate.
// Note that a given identifier must be either the empty string or be at
// least two characters long.
// Identifiers must be at least two characters long.
// In the case that the identifier is the empty string, it will be skipped.
message CryptoReplaceFfxFpeConfig {
// These are commonly used subsets of the alphabet that the FFX mode
// natively supports. In the algorithm, the alphabet is selected using
@ -1171,7 +1174,7 @@ message RecordSuppression {
RecordCondition condition = 1;
}
// A condition for determing whether a transformation should be applied to
// A condition for determining whether a transformation should be applied to
// a field.
message RecordCondition {
// The field type of `value` and `field` do not need to match to be

View File

@ -32,6 +32,53 @@ message InfoType {
string name = 1;
}
// Custom information type provided by the user. Used to find domain-specific
// sensitive information configurable to the data in question.
message CustomInfoType {
// Custom information type based on a dictionary of words or phrases. This can
// be used to match sensitive information specific to the data, such as a list
// of employee IDs or job titles.
//
// Dictionary words are case-insensitive and all characters other than letters
// and digits in the unicode [Basic Multilingual
// Plane](https://en.wikipedia.org/wiki/Plane_%28Unicode%29#Basic_Multilingual_Plane)
// will be replaced with whitespace when scanning for matches, so the
// dictionary phrase "Sam Johnson" will match all three phrases "sam johnson",
// "Sam, Johnson", and "Sam (Johnson)". Additionally, the characters
// surrounding any match must be of a different type than the adjacent
// characters within the word, so letters must be next to non-letters and
// digits next to non-digits. For example, the dictionary word "jen" will
// match the first three letters of the text "jen123" but will return no
// matches for "jennifer".
//
// Dictionary words containing a large number of characters that are not
// letters or digits may result in unexpected findings because such characters
// are treated as whitespace.
message Dictionary {
// Message defining a list of words or phrases to search for in the data.
message WordList {
// Words or phrases defining the dictionary. The dictionary must contain
// at least one phrase and every phrase must contain at least 2 characters
// that are letters or digits. [required]
repeated string words = 1;
}
// The source of the dictionary phrases; exactly one may be set.
oneof source {
// List of words or phrases to search for.
WordList word_list = 1;
}
}
// Info type configuration. All custom info types must have configurations
// that do not conflict with built-in info types or other custom info types.
InfoType info_type = 1;
// The detection mechanism for this custom info type; exactly one may be set.
oneof type {
// Dictionary-based custom info type.
Dictionary dictionary = 2;
}
}
// General identifier of a data field in a storage service.
message FieldId {
// Name describing the field.