Class: Fluent::RedshiftOutput

Inherits: BufferedOutput < Object
Defined in: lib/fluent/plugin/out_redshift_auto.rb

Constant Summary

IGNORE_REDSHIFT_ERROR_REGEXP =
  /^ERROR:  Load into table '[^']+' failed\./

Ignores "load into table" errors caused by invalid data format.
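For illustration, this is how the constant is applied in #write to decide whether a COPY failure can be swallowed. The error strings below are typical Redshift load-failure messages invented for the example, not taken from the plugin.

failed = "ERROR:  Load into table 'access_log' failed.  Check 'stl_load_errors' system table for details."
failed =~ /^ERROR:  Load into table '[^']+' failed\./   # => 0   (matched, so the error is ignored)

other = "ERROR:  permission denied for relation access_log"
other =~ /^ERROR:  Load into table '[^']+' failed\./    # => nil (not matched, so the error is re-raised)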

Instance Method Summary

Constructor Details

#initialize ⇒ RedshiftOutput

Returns a new instance of RedshiftOutput.



# File 'lib/fluent/plugin/out_redshift_auto.rb', line 10

def initialize
  super
  require 'aws-sdk'
  require 'zlib'
  require 'time'
  require 'tempfile'
  require 'pg'
  require 'json'
  require 'csv'
end

Instance Method Details

#configure(conf) ⇒ Object



# File 'lib/fluent/plugin/out_redshift_auto.rb', line 49

def configure(conf)
  super
  # normalize the S3 key prefix: ensure a trailing slash and treat "/" as empty
  @path = "#{@path}/" if /.+[^\/]$/ =~ @path
  @path = "" if @path == "/"
  @utc = true if conf['utc']
  @db_conf = {
    host:     @redshift_host,
    port:     @redshift_port,
    dbname:   @redshift_dbname,
    user:     @redshift_user,
    password: @redshift_password
  }
  # derive the field delimiter from the file type unless it was set explicitly
  @delimiter = determine_delimiter(@file_type) if @delimiter.nil? || @delimiter.empty?
  $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
  # COPY statement template; the table name, S3 URI and secret key are filled in by #write
  @copy_sql_template = "copy #{@redshift_schemaname}.%s from '%s' CREDENTIALS 'aws_access_key_id=#{@aws_key_id};aws_secret_access_key=%s' delimiter '#{@delimiter}' GZIP TRUNCATECOLUMNS ESCAPE #{@redshift_copy_base_options};"
end
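As a rough sketch of how this template is expanded later in #write, the %s slots are filled with the table name, the S3 URI of the uploaded gzip, and the secret key. The schema, table, bucket, key and credentials below are invented placeholders.

template = "copy public.%s from '%s' CREDENTIALS " \
           "'aws_access_key_id=AKIAEXAMPLE;aws_secret_access_key=%s' " \
           "delimiter '\t' GZIP TRUNCATECOLUMNS ESCAPE ;"

sql = template % ["access_log", "s3://example-bucket/logs/20240101_0000_00.gz", "EXAMPLESECRET"]
# => "copy public.access_log from 's3://example-bucket/logs/20240101_0000_00.gz' CREDENTIALS ..."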

#format(tag, time, record) ⇒ Object



# File 'lib/fluent/plugin/out_redshift_auto.rb', line 79

def format(tag, time, record)
  if @make_auto_table == 1 && json?
    # collect the record's keys as column names so the target table
    # can be created automatically from the tag
    cols = record.keys.map(&:to_s)
    make_table_from_tag_name(tag, cols)
  end

  # JSON records are buffered as msgpack-encoded JSON strings;
  # other file types emit the raw log field followed by a newline
  (json?) ? JSON.generate(record).to_msgpack : "#{record[@record_log_tag]}\n"
end
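A minimal sketch of what the JSON branch emits, assuming the json and msgpack gems are available; the record fields are invented for the example.

require 'json'
require 'msgpack'

record = { "user_id" => 42, "action" => "login" }   # illustrative record
packed = JSON.generate(record).to_msgpack           # what #format buffers for a JSON file_type
MessagePack.unpack(packed)                          # => "{\"user_id\":42,\"action\":\"login\"}"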

#start ⇒ Object



# File 'lib/fluent/plugin/out_redshift_auto.rb', line 66

def start
  super
  # init s3 conf
  $log.debug format_log("redshift file_type:#{@file_type} delimiter:'#{@delimiter}'")
  options = {
    :access_key_id     => @aws_key_id,
    :secret_access_key => @aws_sec_key
  }
  options[:s3_endpoint] = @s3_endpoint if @s3_endpoint
  @s3 = AWS::S3.new(options)
  @bucket = @s3.buckets[@s3_bucket]
end
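For context, here is a hedged standalone sketch of the same aws-sdk v1 calls; credentials, bucket and key are invented placeholders. Note that aws-sdk v2 and later expose Aws::S3 rather than AWS::S3.

require 'aws-sdk'   # aws-sdk v1
require 'pathname'

s3 = AWS::S3.new(:access_key_id => "AKIAEXAMPLE", :secret_access_key => "EXAMPLESECRET")
bucket = s3.buckets["example-bucket"]
bucket.objects["logs/example.gz"].write(Pathname.new("/tmp/example.gz"),
                                        :acl => :bucket_owner_full_control)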

#write(chunk) ⇒ Object



# File 'lib/fluent/plugin/out_redshift_auto.rb', line 93

def write(chunk)
  $log.debug format_log("start creating gz.")
  # when tag_table is enabled, derive the table name from the buffer chunk's file name
  if @tag_table == 1
    file_name = File::basename(chunk.path)
    table_name = file_name.sub(/\..*/, "")
  else
    table_name = @redshift_tablename
  end

  # create a gz file
  tmp = Tempfile.new("s3-")
  tmp = (json?) ? create_gz_file_from_json(tmp, chunk, @delimiter) :
                  create_gz_file_from_msgpack(tmp, chunk)

  # no data -> skip
  unless tmp
    $log.debug format_log("received no valid data. ")
    return false # for debug
  end

  # create a file path with time format
  s3path = create_s3path(@bucket, @path)

  # upload gz to s3
  @bucket.objects[s3path].write(Pathname.new(tmp.path),
                                :acl => :bucket_owner_full_control)
  # copy gz on s3 to redshift
  s3_uri = "s3://#{@s3_bucket}/#{s3path}"
  sql = @copy_sql_template % [table_name, s3_uri, @aws_sec_key]
  $log.debug format_log("start copying. s3_uri=#{s3_uri}")
  conn = nil
  begin
    conn = PG.connect(@db_conf)
    conn.exec(sql)
    $log.info format_log("completed copying to redshift. s3_uri=#{s3_uri}")
  rescue PG::Error => e
    $log.error format_log("failed to copy data into redshift. s3_uri=#{s3_uri}"), :error=>e.to_s
    raise e unless e.to_s =~ IGNORE_REDSHIFT_ERROR_REGEXP
    return false # for debug
  ensure
    conn.close rescue nil if conn
  end
  true # for debug
end
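To make the error handling above concrete, here is a minimal standalone sketch, assuming illustrative connection parameters and a COPY statement built as shown under #configure. Only failures matching IGNORE_REDSHIFT_ERROR_REGEXP are swallowed; anything else propagates so Fluentd retries the chunk.

require 'pg'

IGNORE_REDSHIFT_ERROR_REGEXP = /^ERROR:  Load into table '[^']+' failed\./

db_conf = { :host => "redshift.example.com", :port => 5439,                       # illustrative values
            :dbname => "exampledb", :user => "fluentd", :password => "EXAMPLESECRET" }
sql = "copy public.access_log from 's3://example-bucket/logs/example.gz' GZIP"    # illustrative

conn = nil
begin
  conn = PG.connect(db_conf)
  conn.exec(sql)
rescue PG::Error => e
  # bad-data load failures are skipped; other errors are re-raised and retried by Fluentd
  raise e unless e.to_s =~ IGNORE_REDSHIFT_ERROR_REGEXP
ensure
  conn.close rescue nil if conn
end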