Synchronize new proto changes.

This commit is contained in:
Google APIs 2018-01-31 21:06:38 -08:00
parent 143084a262
commit 62273f28e1
3 changed files with 21 additions and 31 deletions

View File

@ -16,16 +16,4 @@ authentication:
- selector: '*'
oauth:
canonical_scopes: |-
https://www.googleapis.com/auth/bigquery,
https://www.googleapis.com/auth/cloud-platform,
https://www.googleapis.com/auth/cloud-platform.read-only
- selector: |-
google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig,
google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig,
google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun,
google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns,
google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig
oauth:
canonical_scopes: |-
https://www.googleapis.com/auth/bigquery,
https://www.googleapis.com/auth/cloud-platform

View File

@ -1,4 +1,4 @@
// Copyright 2017 Google Inc.
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -97,7 +97,7 @@ service DataTransferService {
};
}
// Creates transfer runs for a time range [range_start_time, range_end_time].
// Creates transfer runs for a time range [start_time, end_time].
// For each date - or whatever granularity the data source supports - in the
// range, one transfer run is created.
// Note that runs are created per UTC time in the time range.
@ -339,7 +339,7 @@ message DataSource {
// for the data source.
bool manual_runs_disabled = 17;
// The minimum interval between two consecutive scheduled runs.
// The minimum interval for the scheduler to schedule runs.
google.protobuf.Duration minimum_schedule_interval = 18;
}
@ -388,10 +388,8 @@ message ListDataSourcesResponse {
message CreateTransferConfigRequest {
// The BigQuery project id where the transfer configuration should be created.
// Must be in the format /projects/{project_id}/locations/{location_id}
// or
// /projects/{project_id}/locations/-
// In case '-' is specified as location_id, the location is inferred from
// the destination dataset region.
// If the specified location and the location of the destination BigQuery
// dataset do not match, the request will fail.
string parent = 1;
// Data transfer configuration to create.

View File

@ -1,4 +1,4 @@
// Copyright 2017 Google Inc.
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -35,6 +35,7 @@ option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
// When a new transfer configuration is created, the specified
// `destination_dataset_id` is created when needed and shared with the
// appropriate data source service account.
// Next id: 20
message TransferConfig {
// The resource name of the transfer config.
// Transfer config names have the form
@ -94,6 +95,8 @@ message TransferConfig {
// Output only. Unique ID of the user on whose behalf transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
// May be negative. Note that this identifier is not stable.
// It may change over time even for the same user.
int64 user_id = 11;
// Output only. Region in which BigQuery dataset is located.
@ -101,7 +104,7 @@ message TransferConfig {
}
// Represents a data transfer run.
// Next id: 23
// Next id: 27
message TransferRun {
// The resource name of the transfer run.
// Transfer run names have the form
@ -109,19 +112,16 @@ message TransferRun {
// The name is ignored when creating a transfer run.
string name = 1;
// The BigQuery target dataset id.
string destination_dataset_id = 2;
// Minimum time after which a transfer run can be started.
google.protobuf.Timestamp schedule_time = 3;
// Data transfer specific parameters.
google.protobuf.Struct params = 9;
// For batch transfer runs, specifies the date and time that
// data should be ingested.
google.protobuf.Timestamp run_time = 10;
// Status of the transfer run.
google.rpc.Status error_status = 21;
// Output only. Time when transfer run was started.
// Parameter ignored by server for input requests.
google.protobuf.Timestamp start_time = 4;
@ -133,6 +133,12 @@ message TransferRun {
// Output only. Last time the data transfer run state was updated.
google.protobuf.Timestamp update_time = 6;
// Output only. Data transfer specific parameters.
google.protobuf.Struct params = 9;
// Output only. The BigQuery target dataset id.
string destination_dataset_id = 2;
// Output only. Data source id.
string data_source_id = 7;
@ -142,7 +148,8 @@ message TransferRun {
// Output only. Unique ID of the user on whose behalf transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
// May be negative.
// May be negative. Note that this identifier is not stable.
// It may change over time even for the same user.
int64 user_id = 11;
// Output only. Describes the schedule of this transfer run if it was
@ -198,9 +205,6 @@ enum TransferState {
// State placeholder.
TRANSFER_STATE_UNSPECIFIED = 0;
// Data transfer is inactive.
INACTIVE = 1;
// Data transfer is scheduled and is waiting to be picked up by
// data transfer backend.
PENDING = 2;