Class: Fluent::Plugin::BigQueryLoadOutput
- Inherits: BigQueryBaseOutput
  (ancestor chain: Object → Output → BigQueryBaseOutput → Fluent::Plugin::BigQueryLoadOutput)
- Defined in: lib/fluent/plugin/out_bigquery_load.rb
Instance Method Summary
- #buffer ⇒ Object
  Buffer.
- #configure(conf) ⇒ Object
- #format(tag, time, record) ⇒ Object
  For the Fluent::Plugin::Output#implement? method.
- #prefer_delayed_commit ⇒ Object
- #start ⇒ Object
- #try_write(chunk) ⇒ Object
- #write(chunk) ⇒ Object
Methods inherited from BigQueryBaseOutput
#clustering_fields, #fetch_schema, #fetch_schema_target_table, #get_schema, #multi_workers_ready?, #read_schema, #read_schema_target_path, #request_timeout_sec, #time_partitioning_type, #writer
Instance Method Details
#buffer ⇒ Object
Buffer
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 25

config_section :buffer do
  config_set_default :@type, "file"
  config_set_default :flush_mode, :interval
  config_set_default :flush_interval, 3600 # 1h
  config_set_default :flush_thread_interval, 5
  config_set_default :flush_thread_burst_interval, 5
  config_set_default :chunk_limit_size, 1 * 1024 ** 3 # 1GB
  config_set_default :total_limit_size, 32 * 1024 ** 3 # 32GB
  config_set_default :delayed_commit_timeout, 1800 # 30m
end
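These buffer defaults can be overridden per output in the Fluentd configuration as usual. Below is a minimal sketch, not a complete configuration: the match pattern and buffer path are illustrative, and required options such as authentication, project, dataset, and table are omitted.

<match app.events>
  @type bigquery_load
  # project / dataset / table / auth settings omitted
  <buffer>
    @type file
    path /var/log/td-agent/buffer/bigquery_load
    flush_interval 1800      # flush every 30 minutes instead of the 1h default
    chunk_limit_size 512m    # stay well under the 1GB default
    total_limit_size 16g
  </buffer>
</match>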
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 37

def configure(conf)
  super

  @is_load = true
  placeholder_params = "project=#{@project}/dataset=#{@dataset}/table=#{@tablelist.join(",")}/fetch_schema_table=#{@fetch_schema_table}"
  placeholder_validate!(:bigquery_load, placeholder_params)
end
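Note that configure concatenates the project, dataset, table list, and fetch_schema_table settings and passes them to Fluentd's placeholder validation, so any placeholder used in those options must be resolvable from the buffer chunk keys. A hedged sketch with illustrative names, showing a time placeholder in table backed by a time chunk key:

<match app.events>
  @type bigquery_load
  # auth settings omitted
  project my-gcp-project        # illustrative
  dataset app_logs              # illustrative
  table   events_%Y%m%d         # time placeholder, resolved per buffer chunk
  <buffer time>
    timekey 1d
  </buffer>
</match>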
#format(tag, time, record) ⇒ Object
For the Fluent::Plugin::Output#implement? method.
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 61

def format(tag, time, record)
  super
end
#prefer_delayed_commit ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 56

def prefer_delayed_commit
  @use_delayed_commit
end
#start ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 45

def start
  super

  if prefer_delayed_commit
    @polling_targets = []
    @polling_mutex = Mutex.new
    log.debug("start load job polling")
    timer_execute(:polling_bigquery_load_job, @wait_job_interval, &method(:poll))
  end
end
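When delayed commit is preferred, start registers a timer that invokes the plugin's polling routine every @wait_job_interval seconds to check on submitted load jobs. Assuming that variable is backed by a wait_job_interval configuration parameter, as the name suggests (treat the parameter name as an assumption), the polling cadence can be tuned:

<match app.events>
  @type bigquery_load
  # other options omitted
  wait_job_interval 60   # assumed parameter name: poll load job status every 60 seconds
</match>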
#try_write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 99

def try_write(chunk)
  job_reference = do_write(chunk)
  @polling_mutex.synchronize do
    @polling_targets << job_reference
  end
rescue Fluent::BigQuery::Error => e
  raise if e.retryable?

  @retry_mutex.synchronize do
    if @secondary
      # TODO: find better way
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: @buffer_config.retry_max_times,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        secondary: true, secondary_threshold: Float::EPSILON,
        randomize: @buffer_config.retry_randomize
      )
    else
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: 0,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        randomize: @buffer_config.retry_randomize
      )
    end
  end

  raise
end
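In the delayed-commit path, try_write only submits the load job and queues its job reference for the polling timer started in #start; the chunk is acknowledged later, once polling observes that the job has finished. When a non-retryable error occurs, the rescue block rebuilds the retry state so that, if a <secondary> output is configured, subsequent retries are routed to it almost immediately (secondary_threshold is set to Float::EPSILON). A hedged sketch of such a fallback, with an illustrative directory:

<match app.events>
  @type bigquery_load
  # main output options omitted
  <secondary>
    @type secondary_file
    directory /var/log/td-agent/bigquery_load_failed
  </secondary>
</match>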
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 65

def write(chunk)
  job_reference = do_write(chunk)

  until response = writer.fetch_load_job(job_reference)
    sleep @wait_job_interval
  end

  writer.commit_load_job(job_reference.chunk_id_hex, response)
rescue Fluent::BigQuery::Error => e
  raise if e.retryable?

  @retry_mutex.synchronize do
    if @secondary
      # TODO: find better way
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: @buffer_config.retry_max_times,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        secondary: true, secondary_threshold: Float::EPSILON,
        randomize: @buffer_config.retry_randomize
      )
    else
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: 0,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        randomize: @buffer_config.retry_randomize
      )
    end
  end

  raise
end
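The synchronous #write path blocks its flush thread, sleeping @wait_job_interval between fetch_load_job calls until the load job completes, and only then commits it. Deployments that stay on this path sometimes raise the buffer's flush thread count so that one long-running load job does not stall other chunks; a sketch, with an illustrative value:

<match app.events>
  @type bigquery_load
  # other options omitted
  <buffer>
    flush_thread_count 4   # poll several in-flight load jobs in parallel
  </buffer>
</match>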