Class: Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client

Inherits:
Object
Includes:
Paths
Defined in:
lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb

Overview

Client for the DataTransferService service.

This API allows users to manage their data transfers into BigQuery.

Defined Under Namespace

Classes: Configuration

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Methods included from Paths

#data_source_path, #location_path, #project_path, #run_path, #transfer_config_path

Constructor Details

#initialize {|config| ... } ⇒ Client

Create a new DataTransferService client object.

Examples:


# Create a client using the default configuration
client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a client using a custom configuration
client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
  config.timeout = 10.0
end
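
# A further sketch (not part of the generated examples): credentials can be
# supplied in the same block. The key file path below is a hypothetical placeholder.
client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
  config.credentials = "/path/to/keyfile.json"
  config.timeout = 10.0
end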

Yields:

  • (config)

    Configure the DataTransferService client.

Yield Parameters:

  • config (Client::Configuration)



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 179

def initialize
  # These require statements are intentionally placed here to initialize
  # the gRPC module only when it's required.
  # See https://github.com/googleapis/toolkit/issues/446
  require "gapic/grpc"
  require "google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb"

  # Create the configuration object
  @config = Configuration.new Client.configure

  # Yield the configuration if needed
  yield @config if block_given?

  # Create credentials
  credentials = @config.credentials
  # Use self-signed JWT if the endpoint is unchanged from default,
  # but only if the default endpoint does not have a region prefix.
  enable_self_signed_jwt = @config.endpoint.nil? ||
                           (@config.endpoint == Configuration::DEFAULT_ENDPOINT &&
                           !@config.endpoint.split(".").first.include?("-"))
  credentials ||= Credentials.default scope: @config.scope,
                                      enable_self_signed_jwt: enable_self_signed_jwt
  if credentials.is_a?(::String) || credentials.is_a?(::Hash)
    credentials = Credentials.new credentials, scope: @config.scope
  end
  @quota_project_id = @config.quota_project
  @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id

  @data_transfer_service_stub = ::Gapic::ServiceStub.new(
    ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Stub,
    credentials: credentials,
    endpoint: @config.endpoint,
    endpoint_template: DEFAULT_ENDPOINT_TEMPLATE,
    universe_domain: @config.universe_domain,
    channel_args: @config.channel_args,
    interceptors: @config.interceptors,
    channel_pool_config: @config.channel_pool
  )

  @location_client = Google::Cloud::Location::Locations::Client.new do |config|
    config.credentials = credentials
    config.quota_project = @quota_project_id
    config.endpoint = @data_transfer_service_stub.endpoint
    config.universe_domain = @data_transfer_service_stub.universe_domain
  end
end

Instance Attribute Details

#location_client ⇒ Google::Cloud::Location::Locations::Client (readonly)

Get the associated client for the Locations mix-in.

Returns:

  • (Google::Cloud::Location::Locations::Client)


# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 231

def location_client
  @location_client
end

Class Method Details

.configure {|config| ... } ⇒ Client::Configuration

Configure the DataTransferService Client class.

See Configuration for a description of the configuration fields.

Examples:


# Modify the configuration for all DataTransferService clients
::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
  config.timeout = 10.0
end
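
# A further sketch (not from the generated docs): per-RPC defaults can be adjusted
# the same way; the rpcs.get_data_source settings mirror the defaults in the source below.
::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
  config.rpcs.get_data_source.timeout = 30.0
  config.rpcs.get_data_source.retry_policy = {
    initial_delay: 0.2, max_delay: 30.0, multiplier: 1.3, retry_codes: [14, 4]
  }
end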

Yields:

  • (config)

    Configure the DataTransferService client.

Yield Parameters:

  • config (Client::Configuration)

Returns:

  • (Client::Configuration)



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 61

def self.configure
  @configure ||= begin
    namespace = ["Google", "Cloud", "Bigquery", "DataTransfer", "V1"]
    parent_config = while namespace.any?
                      parent_name = namespace.join "::"
                      parent_const = const_get parent_name
                      break parent_const.configure if parent_const.respond_to? :configure
                      namespace.pop
                    end
    default_config = Client::Configuration.new parent_config

    default_config.rpcs.get_data_source.timeout = 20.0
    default_config.rpcs.get_data_source.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.list_data_sources.timeout = 20.0
    default_config.rpcs.list_data_sources.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.create_transfer_config.timeout = 30.0

    default_config.rpcs.update_transfer_config.timeout = 30.0

    default_config.rpcs.delete_transfer_config.timeout = 20.0
    default_config.rpcs.delete_transfer_config.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.get_transfer_config.timeout = 20.0
    default_config.rpcs.get_transfer_config.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.list_transfer_configs.timeout = 20.0
    default_config.rpcs.list_transfer_configs.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.schedule_transfer_runs.timeout = 30.0

    default_config.rpcs.get_transfer_run.timeout = 20.0
    default_config.rpcs.get_transfer_run.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.delete_transfer_run.timeout = 20.0
    default_config.rpcs.delete_transfer_run.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.list_transfer_runs.timeout = 20.0
    default_config.rpcs.list_transfer_runs.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.list_transfer_logs.timeout = 20.0
    default_config.rpcs.list_transfer_logs.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config.rpcs.check_valid_creds.timeout = 20.0
    default_config.rpcs.check_valid_creds.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }

    default_config
  end
  yield @configure if block_given?
  @configure
end

Instance Method Details

#check_valid_creds(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse #check_valid_creds(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse

Returns true if valid credentials exist for the given data source and requesting user.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new

# Call the check_valid_creds method.
result = client.check_valid_creds request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.
p result

Overloads:

  • #check_valid_creds(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse

    Pass arguments to check_valid_creds via a request object, either of type CheckValidCredsRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #check_valid_creds(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse

    Pass arguments to check_valid_creds via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • name (::String) (defaults to: nil)

      Required. The data source in the form: projects/{project_id}/dataSources/{data_source_id} or projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}.
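
    A minimal keyword-argument sketch (not from the generated docs; the resource name is a hypothetical placeholder and client is the object from the Basic example above):

    result = client.check_valid_creds name: "projects/my-project/dataSources/my_data_source"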

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1589

def check_valid_creds request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.check_valid_creds.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.name
    header_params["name"] = request.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.check_valid_creds.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.check_valid_creds.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :check_valid_creds, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#configure {|config| ... } ⇒ Client::Configuration

Configure the DataTransferService Client instance.

The configuration is set to the derived mode, meaning that values can be changed, but structural changes (adding new fields, etc.) are not allowed. Structural changes should be made on Client.configure.

See Configuration for a description of the configuration fields.
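
For illustration, a minimal sketch of adjusting a single client instance (the timeout value is arbitrary):

client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
client.configure do |config|
  config.timeout = 20.0
end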

Yields:

  • (config)

    Configure the DataTransferService client.

Yield Parameters:

  • config (Client::Configuration)

Returns:

  • (Client::Configuration)



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 149

def configure
  yield @config if block_given?
  @config
end

#create_transfer_config(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig #create_transfer_config(parent: nil, transfer_config: nil, authorization_code: nil, version_info: nil, service_account_name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

Creates a new data transfer configuration.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new

# Call the create_transfer_config method.
result = client.create_transfer_config request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
p result

Overloads:

  • #create_transfer_config(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

    Pass arguments to create_transfer_config via a request object, either of type CreateTransferConfigRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #create_transfer_config(parent: nil, transfer_config: nil, authorization_code: nil, version_info: nil, service_account_name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

    Pass arguments to create_transfer_config via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • parent (::String) (defaults to: nil)

      Required. The BigQuery project id where the transfer configuration should be created. Must be in the format projects/{project_id}/locations/{location_id} or projects/{project_id}. If specified location and location of the destination bigquery dataset do not match - the request will fail.

    • transfer_config (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash) (defaults to: nil)

      Required. Data transfer configuration to create.

    • authorization_code (::String) (defaults to: nil)

      Optional OAuth2 authorization code to use with this transfer configuration. This is required only if transferConfig.dataSourceId is 'youtube_channel' and new credentials are needed, as indicated by CheckValidCreds. In order to obtain authorization_code, make a request to the following URL:

      
      https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=<var>client_id</var>&scope=<var>data_source_scopes</var>
      
      • The client_id is the OAuth client_id of the data source as returned by the ListDataSources method.
      • data_source_scopes are the scopes returned by the ListDataSources method.

      Note that this should not be set when service_account_name is used to create the transfer config.

    • version_info (::String) (defaults to: nil)

      Optional version info. This is required only if transferConfig.dataSourceId is not 'youtube_channel' and new credentials are needed, as indicated by CheckValidCreds. In order to obtain version info, make a request to the following URL:

      
      https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=<var>client_id</var>&scope=<var>data_source_scopes</var>
      
      • The client_id is the OAuth client_id of the data source as returned by the ListDataSources method.
      • data_source_scopes are the scopes returned by the ListDataSources method.

      Note that this should not be set when service_account_name is used to create the transfer config.

    • service_account_name (::String) (defaults to: nil)

      Optional service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the requesting user calling this API has permissions to act as this service account.

      Note that not all data sources support service account credentials when creating a transfer config. For the latest list of data sources, read about using service accounts.
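
    A minimal keyword-argument sketch (not from the generated docs; the project, dataset, and data source ids are hypothetical placeholders and client is the object from the Basic example above):

    transfer_config = Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
      destination_dataset_id: "my_dataset",
      display_name: "My nightly transfer",
      data_source_id: "google_cloud_storage"
    )
    result = client.create_transfer_config parent: "projects/my-project", transfer_config: transfer_config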

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 512

def create_transfer_config request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.create_transfer_config.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.parent
    header_params["parent"] = request.parent
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.create_transfer_config.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.create_transfer_config.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :create_transfer_config, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#delete_transfer_config(request, options = nil) ⇒ ::Google::Protobuf::Empty #delete_transfer_config(name: nil) ⇒ ::Google::Protobuf::Empty

Deletes a data transfer configuration, including any associated transfer runs and logs.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new

# Call the delete_transfer_config method.
result = client.delete_transfer_config request

# The returned object is of type Google::Protobuf::Empty.
p result

Overloads:

  • #delete_transfer_config(request, options = nil) ⇒ ::Google::Protobuf::Empty

    Pass arguments to delete_transfer_config via a request object, either of type Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #delete_transfer_config(name: nil) ⇒ ::Google::Protobuf::Empty

    Pass arguments to delete_transfer_config via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • name (::String) (defaults to: nil)

      Required. The field will contain the name of the resource requested, for example: projects/{project_id}/transferConfigs/{config_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}
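
    A minimal keyword-argument sketch (the config name is a hypothetical placeholder; client is the object from the Basic example above):

    client.delete_transfer_config name: "projects/my-project/transferConfigs/my-config-id"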

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Protobuf::Empty)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Protobuf::Empty)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 728

def delete_transfer_config request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.delete_transfer_config.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.name
    header_params["name"] = request.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.delete_transfer_config.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.delete_transfer_config.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :delete_transfer_config, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#delete_transfer_run(request, options = nil) ⇒ ::Google::Protobuf::Empty #delete_transfer_run(name: nil) ⇒ ::Google::Protobuf::Empty

Deletes the specified transfer run.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new

# Call the delete_transfer_run method.
result = client.delete_transfer_run request

# The returned object is of type Google::Protobuf::Empty.
p result

Overloads:

  • #delete_transfer_run(request, options = nil) ⇒ ::Google::Protobuf::Empty

    Pass arguments to delete_transfer_run via a request object, either of type Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #delete_transfer_run(name: nil) ⇒ ::Google::Protobuf::Empty

    Pass arguments to delete_transfer_run via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • name (::String) (defaults to: nil)

      Required. The field will contain the name of the resource requested, for example: projects/{project_id}/transferConfigs/{config_id}/runs/{run_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}
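
    A minimal keyword-argument sketch (the run name is a hypothetical placeholder; client is the object from the Basic example above):

    client.delete_transfer_run name: "projects/my-project/transferConfigs/my-config-id/runs/my-run-id"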

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Protobuf::Empty)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Protobuf::Empty)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1293

def delete_transfer_run request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.delete_transfer_run.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.name
    header_params["name"] = request.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.delete_transfer_run.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.delete_transfer_run.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :delete_transfer_run, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#enroll_data_sources(request, options = nil) ⇒ ::Google::Protobuf::Empty #enroll_data_sources(name: nil, data_source_ids: nil) ⇒ ::Google::Protobuf::Empty

Enroll data sources in a user project. This allows users to create transfer configurations for these data sources. Enrolled data sources are also returned by the ListDataSources RPC and, as such, appear in the BigQuery UI; see the public guides for the BigQuery web UI and the Data Transfer Service for details.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new

# Call the enroll_data_sources method.
result = client.enroll_data_sources request

# The returned object is of type Google::Protobuf::Empty.
p result

Overloads:

  • #enroll_data_sources(request, options = nil) ⇒ ::Google::Protobuf::Empty

    Pass arguments to enroll_data_sources via a request object, either of type EnrollDataSourcesRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #enroll_data_sources(name: nil, data_source_ids: nil) ⇒ ::Google::Protobuf::Empty

    Pass arguments to enroll_data_sources via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • name (::String) (defaults to: nil)

      The name of the project resource in the form: projects/{project_id}

    • data_source_ids (::Array<::String>) (defaults to: nil)

      Data sources that are enrolled. It is required to provide at least one data source id.
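
    A minimal keyword-argument sketch (the project and data source ids are hypothetical placeholders; client is the object from the Basic example above):

    client.enroll_data_sources name: "projects/my-project", data_source_ids: ["google_cloud_storage"]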

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Protobuf::Empty)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Protobuf::Empty)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1684

def enroll_data_sources request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.enroll_data_sources.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.name
    header_params["name"] = request.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.enroll_data_sources.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.enroll_data_sources.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :enroll_data_sources, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#get_data_source(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource #get_data_source(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource

Retrieves a supported data source and returns its settings.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new

# Call the get_data_source method.
result = client.get_data_source request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource.
p result

Overloads:

  • #get_data_source(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource

    Pass arguments to get_data_source via a request object, either of type GetDataSourceRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #get_data_source(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource

    Pass arguments to get_data_source via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • name (::String) (defaults to: nil)

      Required. The field will contain the name of the resource requested, for example: projects/{project_id}/dataSources/{data_source_id} or projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}
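
    A minimal keyword-argument sketch (the resource name is a hypothetical placeholder; client is the object from the Basic example above):

    data_source = client.get_data_source name: "projects/my-project/dataSources/my_data_source"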

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::DataSource)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::DataSource)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 281

def get_data_source request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.get_data_source.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.name
    header_params["name"] = request.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.get_data_source.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.get_data_source.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :get_data_source, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#get_transfer_config(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig #get_transfer_config(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

Returns information about a data transfer config.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new

# Call the get_transfer_config method.
result = client.get_transfer_config request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
p result

Overloads:

  • #get_transfer_config(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

    Pass arguments to get_transfer_config via a request object, either of type GetTransferConfigRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #get_transfer_config(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

    Pass arguments to get_transfer_config via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • name (::String) (defaults to: nil)

      Required. The field will contain the name of the resource requested, for example: projects/{project_id}/transferConfigs/{config_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}
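
    A minimal keyword-argument sketch (the config name is a hypothetical placeholder; client is the object from the Basic example above):

    transfer_config = client.get_transfer_config name: "projects/my-project/transferConfigs/my-config-id"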

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 815

def get_transfer_config request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.get_transfer_config.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.name
    header_params["name"] = request.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.get_transfer_config.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.get_transfer_config.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :get_transfer_config, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#get_transfer_run(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun #get_transfer_run(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun

Returns information about the particular transfer run.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new

# Call the get_transfer_run method.
result = client.get_transfer_run request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.
p result

Overloads:

  • #get_transfer_run(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun

    Pass arguments to get_transfer_run via a request object, either of type GetTransferRunRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #get_transfer_run(name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun

    Pass arguments to get_transfer_run via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • name (::String) (defaults to: nil)

      Required. The field will contain the name of the resource requested, for example: projects/{project_id}/transferConfigs/{config_id}/runs/{run_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}
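
    A minimal keyword-argument sketch (the run name is a hypothetical placeholder; client is the object from the Basic example above):

    run = client.get_transfer_run name: "projects/my-project/transferConfigs/my-config-id/runs/my-run-id"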

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1205

def get_transfer_run request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.get_transfer_run.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.name
    header_params["name"] = request.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.get_transfer_run.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.get_transfer_run.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :get_transfer_run, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#list_data_sources(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource> #list_data_sources(parent: nil, page_token: nil, page_size: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>

Lists supported data sources and returns their settings.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new

# Call the list_data_sources method.
result = client.list_data_sources request

# The returned object is of type Gapic::PagedEnumerable. You can iterate
# over elements, and API calls will be issued to fetch pages as needed.
result.each do |item|
  # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.
  p item
end

Overloads:

  • #list_data_sources(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>

    Pass arguments to list_data_sources via a request object, either of type ListDataSourcesRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #list_data_sources(parent: nil, page_token: nil, page_size: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>

    Pass arguments to list_data_sources via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • parent (::String) (defaults to: nil)

      Required. The BigQuery project id for which data sources should be returned. Must be in the form: projects/{project_id} or projects/{project_id}/locations/{location_id}

    • page_token (::String) (defaults to: nil)

      Pagination token, which can be used to request a specific page of ListDataSourcesRequest list results. For multiple-page results, ListDataSourcesResponse outputs a next_page token, which can be used as the page_token value to request the next page of list results.

    • page_size (::Integer) (defaults to: nil)

      Page size. The default page size is the maximum value of 1000 results.
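
    A minimal keyword-argument sketch (the project id is a hypothetical placeholder; client is the object from the Basic example above):

    client.list_data_sources(parent: "projects/my-project", page_size: 100).each do |data_source|
      p data_source.data_source_id
    end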

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 380

def list_data_sources request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.list_data_sources.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.parent
    header_params["parent"] = request.parent
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.list_data_sources.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.list_data_sources.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :list_data_sources, request, options: options do |response, operation|
    response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_data_sources, request, response, operation, options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#list_transfer_configs(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig> #list_transfer_configs(parent: nil, data_source_ids: nil, page_token: nil, page_size: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>

Returns information about all transfer configs owned by a project in the specified location.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new

# Call the list_transfer_configs method.
result = client.list_transfer_configs request

# The returned object is of type Gapic::PagedEnumerable. You can iterate
# over elements, and API calls will be issued to fetch pages as needed.
result.each do |item|
  # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
  p item
end

Overloads:

  • #list_transfer_configs(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>

    Pass arguments to list_transfer_configs via a request object, either of type ListTransferConfigsRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #list_transfer_configs(parent: nil, data_source_ids: nil, page_token: nil, page_size: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>

    Pass arguments to list_transfer_configs via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • parent (::String) (defaults to: nil)

      Required. The BigQuery project id for which transfer configs should be returned: projects/{project_id} or projects/{project_id}/locations/{location_id}

    • data_source_ids (::Array<::String>) (defaults to: nil)

      When specified, only configurations of requested data sources are returned.

    • page_token (::String) (defaults to: nil)

      Pagination token, which can be used to request a specific page of ListTransfersRequest list results. For multiple-page results, ListTransfersResponse outputs a next_page token, which can be used as the page_token value to request the next page of list results.

    • page_size (::Integer) (defaults to: nil)

      Page size. The default page size is the maximum value of 1000 results.
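
    A minimal keyword-argument sketch (the project and data source ids are hypothetical placeholders; client is the object from the Basic example above):

    client.list_transfer_configs(parent: "projects/my-project", data_source_ids: ["google_cloud_storage"]).each do |transfer_config|
      p transfer_config.name
    end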

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>)
  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 917

def list_transfer_configs request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.list_transfer_configs.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.parent
    header_params["parent"] = request.parent
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.list_transfer_configs.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.list_transfer_configs.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :list_transfer_configs, request, options: options do |response, operation|
    response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_configs, request, response, operation, options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#list_transfer_logs(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage> #list_transfer_logs(parent: nil, page_token: nil, page_size: nil, message_types: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>

Returns log messages for the transfer run.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new

# Call the list_transfer_logs method.
result = client.list_transfer_logs request

# The returned object is of type Gapic::PagedEnumerable. You can iterate
# over elements, and API calls will be issued to fetch pages as needed.
result.each do |item|
  # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage.
  p item
end

Overloads:

  • #list_transfer_logs(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>

    Pass arguments to list_transfer_logs via a request object, either of type ListTransferLogsRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #list_transfer_logs(parent: nil, page_token: nil, page_size: nil, message_types: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>

    Pass arguments to list_transfer_logs via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • parent (::String) (defaults to: nil)

      Required. Transfer run name in the form: projects/{project_id}/transferConfigs/{config_id}/runs/{run_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}

    • page_token (::String) (defaults to: nil)

      Pagination token, which can be used to request a specific page of ListTransferLogsRequest list results. For multiple-page results, ListTransferLogsResponse outputs a next_page_token value, which can be used as the page_token value to request the next page of list results.

    • page_size (::Integer) (defaults to: nil)

      Page size. If unspecified, defaults to the maximum allowed value of 1000 results.

    • message_types (::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>) (defaults to: nil)

      Message types to return. If not populated - INFO, WARNING and ERROR messages are returned.

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>)

  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1500

def list_transfer_logs request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.list_transfer_logs.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.parent
    header_params["parent"] = request.parent
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  [:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.list_transfer_logs.timeout,
                         metadata:     ,
                         retry_policy: @config.rpcs.list_transfer_logs.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :list_transfer_logs, request, options: options do |response, operation|
    response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_logs, request, response, operation, options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#list_transfer_runs(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun> #list_transfer_runs(parent: nil, states: nil, page_token: nil, page_size: nil, run_attempt: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>

Returns information about running and completed transfer runs.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new

# Call the list_transfer_runs method.
result = client.list_transfer_runs request

# The returned object is of type Gapic::PagedEnumerable. You can iterate
# over elements, and API calls will be issued to fetch pages as needed.
result.each do |item|
  # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.
  p item
end
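
Keyword-argument example (an illustrative sketch, not part of the generated documentation; the config name and filter values below are placeholders)

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Placeholder transfer configuration name.
config_name = "projects/my-project/transferConfigs/my-config"

# List only failed runs, returning the latest attempt per scheduled run.
result = client.list_transfer_runs parent: config_name,
                                    states: [:FAILED],
                                    run_attempt: :LATEST

result.each do |run|
  # Each element is a TransferRun.
  p run.name
end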

Overloads:

  • #list_transfer_runs(request, options = nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>

    Pass arguments to list_transfer_runs via a request object, either of type ListTransferRunsRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #list_transfer_runs(parent: nil, states: nil, page_token: nil, page_size: nil, run_attempt: nil) ⇒ ::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>

    Pass arguments to list_transfer_runs via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • parent (::String) (defaults to: nil)

      Required. Name of transfer configuration for which transfer runs should be retrieved. Format of transfer configuration resource name is: projects/{project_id}/transferConfigs/{config_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}.

    • states (::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferState>) (defaults to: nil)

      When specified, only transfer runs with requested states are returned.

    • page_token (::String) (defaults to: nil)

      Pagination token, which can be used to request a specific page of ListTransferRunsRequest list results. For multiple-page results, ListTransferRunsResponse outputs a next_page_token value, which can be used as the page_token value to request the next page of list results.

    • page_size (::Integer) (defaults to: nil)

      Page size. If unspecified, defaults to the maximum allowed value of 1000 results.

    • run_attempt (::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt) (defaults to: nil)

      Indicates how run attempts are to be pulled.

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>)

  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1397

def list_transfer_runs request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.list_transfer_runs.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.parent
    header_params["parent"] = request.parent
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  [:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.list_transfer_runs.timeout,
                         metadata:     ,
                         retry_policy: @config.rpcs.list_transfer_runs.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :list_transfer_runs, request, options: options do |response, operation|
    response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_runs, request, response, operation, options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#schedule_transfer_runs(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse #schedule_transfer_runs(parent: nil, start_time: nil, end_time: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse

Deprecated.

This method is deprecated and may be removed in the next major version update.

Creates transfer runs for a time range [start_time, end_time]. For each date - or whatever granularity the data source supports - in the range, one transfer run is created. Note that runs are created per UTC time in the time range. DEPRECATED: use StartManualTransferRuns instead.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new

# Call the schedule_transfer_runs method.
result = client.schedule_transfer_runs request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.
p result
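
Keyword-argument example (an illustrative sketch for this deprecated RPC; prefer start_manual_transfer_runs in new code, and treat the config name and dates below as placeholders)

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Placeholder transfer configuration name.
config_name = "projects/my-project/transferConfigs/my-config"

# Timestamps may be passed as Google::Protobuf::Timestamp objects or
# equivalent hashes with a :seconds key.
response = client.schedule_transfer_runs(
  parent:     config_name,
  start_time: { seconds: Time.utc(2017, 5, 25).to_i },
  end_time:   { seconds: Time.utc(2017, 5, 30).to_i }
)

# ScheduleTransferRunsResponse#runs lists the transfer runs that were created.
response.runs.each { |run| p run.name }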

Overloads:

  • #schedule_transfer_runs(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse

    Pass arguments to schedule_transfer_runs via a request object, either of type ScheduleTransferRunsRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #schedule_transfer_runs(parent: nil, start_time: nil, end_time: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse

    Pass arguments to schedule_transfer_runs via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • parent (::String) (defaults to: nil)

      Required. Transfer configuration name in the form: projects/{project_id}/transferConfigs/{config_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}.

    • start_time (::Google::Protobuf::Timestamp, ::Hash) (defaults to: nil)

      Required. Start time of the range of transfer runs. For example, "2017-05-25T00:00:00+00:00".

    • end_time (::Google::Protobuf::Timestamp, ::Hash) (defaults to: nil)

      Required. End time of the range of transfer runs. For example, "2017-05-30T00:00:00+00:00".

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse)

  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1017

def schedule_transfer_runs request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.schedule_transfer_runs.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.parent
    header_params["parent"] = request.parent
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  [:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.schedule_transfer_runs.timeout,
                         metadata:     ,
                         retry_policy: @config.rpcs.schedule_transfer_runs.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :schedule_transfer_runs, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#start_manual_transfer_runs(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse #start_manual_transfer_runs(parent: nil, requested_time_range: nil, requested_run_time: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse

Start manual transfer runs to be executed now with schedule_time equal to current time. The transfer runs can be created for a time range where the run_time is between start_time (inclusive) and end_time (exclusive), or for a specific run_time.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new

# Call the start_manual_transfer_runs method.
result = client.start_manual_transfer_runs request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.
p result
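
Keyword-argument example (an illustrative sketch; the config name and time range below are placeholders)

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Placeholder transfer configuration name.
config_name = "projects/my-project/transferConfigs/my-config"

# Backfill a past window; requested_time_range must not include future times.
response = client.start_manual_transfer_runs(
  parent: config_name,
  requested_time_range: {
    start_time: { seconds: Time.utc(2024, 1, 1).to_i },
    end_time:   { seconds: Time.utc(2024, 1, 2).to_i }
  }
)

# StartManualTransferRunsResponse#runs lists the runs that were started.
response.runs.each { |run| p run.name }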

Overloads:

  • #start_manual_transfer_runs(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse

    Pass arguments to start_manual_transfer_runs via a request object, either of type StartManualTransferRunsRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #start_manual_transfer_runs(parent: nil, requested_time_range: nil, requested_run_time: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse

    Pass arguments to start_manual_transfer_runs via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • parent (::String) (defaults to: nil)

      Transfer configuration name in the form: projects/{project_id}/transferConfigs/{config_id} or projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}.

    • requested_time_range (::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange, ::Hash) (defaults to: nil)

      A time_range start and end timestamp for historical data files or reports that are scheduled to be transferred by the scheduled transfer run. requested_time_range must be a past time and cannot include future time values.

    • requested_run_time (::Google::Protobuf::Timestamp, ::Hash) (defaults to: nil)

      A run_time timestamp for historical data files or reports that are scheduled to be transferred by the scheduled transfer run. requested_run_time must be a past time and cannot include future time values.

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse)

  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 1117

def start_manual_transfer_runs request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.start_manual_transfer_runs.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.parent
    header_params["parent"] = request.parent
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  [:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.start_manual_transfer_runs.timeout,
                         metadata:     ,
                         retry_policy: @config.rpcs.start_manual_transfer_runs.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :start_manual_transfer_runs, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end

#universe_domain ⇒ String

The effective universe domain

Returns:

  • (String)
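
Usage sketch (assuming a client built with the default configuration; not part of the generated documentation)

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Returns "googleapis.com" unless a custom universe domain was configured.
client.universe_domain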


# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 159

def universe_domain
  @data_transfer_service_stub.universe_domain
end

#update_transfer_config(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig #update_transfer_config(transfer_config: nil, authorization_code: nil, update_mask: nil, version_info: nil, service_account_name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

Updates a data transfer configuration. All fields must be set, even if they are not updated.

Examples:

Basic example

require "google/cloud/bigquery/data_transfer/v1"

# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new

# Call the update_transfer_config method.
result = client.update_transfer_config request

# The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
p result
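
Keyword-argument example (an illustrative sketch; the config name and new display name below are placeholders)

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Fetch an existing config, change a field, and name only that field
# in the update_mask.
config = client.get_transfer_config name: "projects/my-project/transferConfigs/my-config"
config.display_name = "Nightly load"

updated = client.update_transfer_config transfer_config: config,
                                         update_mask: { paths: ["display_name"] }
p updated.display_name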

Overloads:

  • #update_transfer_config(request, options = nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

    Pass arguments to update_transfer_config via a request object, either of type UpdateTransferConfigRequest or an equivalent Hash.

    Parameters:

    • request (::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Hash)

      A request object representing the call parameters. Required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash.

    • options (::Gapic::CallOptions, ::Hash) (defaults to: nil)

      Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.

  • #update_transfer_config(transfer_config: nil, authorization_code: nil, update_mask: nil, version_info: nil, service_account_name: nil) ⇒ ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig

    Pass arguments to update_transfer_config via keyword arguments. Note that at least one keyword argument is required. To specify no parameters, or to keep all the default parameter values, pass an empty Hash as a request object (see above).

    Parameters:

    • transfer_config (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash) (defaults to: nil)

      Required. Data transfer configuration to update.

    • authorization_code (::String) (defaults to: nil)

      Optional OAuth2 authorization code to use with this transfer configuration. This is required only if transferConfig.dataSourceId is 'youtube_channel' and new credentials are needed, as indicated by CheckValidCreds. In order to obtain authorization_code, make a request to the following URL:

      
      https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=<var>client_id</var>&scope=<var>data_source_scopes</var>
      
      • The client_id is the OAuth client_id of the data source as returned by the ListDataSources method.
      • data_source_scopes are the scopes returned by the ListDataSources method.

      Note that this should not be set when service_account_name is used to update the transfer config.

    • update_mask (::Google::Protobuf::FieldMask, ::Hash) (defaults to: nil)

      Required. List of fields to be updated in this request.

    • version_info (::String) (defaults to: nil)

      Optional version info. This is required only if transferConfig.dataSourceId is not 'youtube_channel' and new credentials are needed, as indicated by CheckValidCreds. In order to obtain version info, make a request to the following URL:

      
      https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=<var>client_id</var>&scope=<var>data_source_scopes</var>
      
      • The client_id is the OAuth client_id of the data source as returned by the ListDataSources method.
      • data_source_scopes are the scopes returned by the ListDataSources method.

      Note that this should not be set when service_account_name is used to update the transfer config.

    • service_account_name (::String) (defaults to: nil)

      Optional service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the requesting user calling this API has permissions to act as this service account.

      Note that not all data sources support service account credentials when creating a transfer config. For the latest list of data sources, read about using service accounts.

Yields:

  • (response, operation)

    Access the result along with the RPC operation

Yield Parameters:

  • response (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig)

  • operation (::GRPC::ActiveCall::Operation)

Returns:

  • (::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig)

Raises:

  • (::Google::Cloud::Error)

    if the RPC is aborted.



# File 'lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb', line 640

def update_transfer_config request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.update_transfer_config.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  header_params = {}
  if request.transfer_config&.name
    header_params["transfer_config.name"] = request.transfer_config.name
  end

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  [:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout:      @config.rpcs.update_transfer_config.timeout,
                         metadata:     ,
                         retry_policy: @config.rpcs.update_transfer_config.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.,
                         retry_policy: @config.retry_policy

  @data_transfer_service_stub.call_rpc :update_transfer_config, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end