Method: Fluent::GoogleCloudOutput#write

Defined in:
lib/fluent/plugin/out_google_cloud.rb

#write(chunk) ⇒ Object


663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
# File 'lib/fluent/plugin/out_google_cloud.rb', line 663

# Fluentd buffered-output flush hook: converts every (time, record) event
# in +chunk+ into a Cloud Logging log entry and ships the entries through
# @write_request.
#
# Events are first grouped by (tag, local_resource_id); each group gets a
# shared monitored resource and common labels, which individual entries may
# then refine per-record. Depending on @split_logs_by_tag, one request is
# sent per group or all entries are combined into a single request.
#
# @param chunk [#each] buffered events yielding (time, record) pairs
#   (presumably a Fluent::BufferChunk — confirm against the plugin base class)
def write(chunk)
  grouped_entries = group_log_entries_by_tag_and_local_resource_id(chunk)

  requests_to_send = []
  grouped_entries.each do |(tag, local_resource_id), arr|
    entries = []
    # Resource and labels shared by every entry in this (tag, resource)
    # group; computed once per group rather than per record.
    group_level_resource, group_level_common_labels =
      determine_group_level_monitored_resource_and_labels(
        tag, local_resource_id)

    arr.each do |time, record|
      # An individual record may override the group-level resource/labels
      # (e.g. based on fields inside the record).
      entry_level_resource, entry_level_common_labels =
        determine_entry_level_monitored_resource_and_labels(
          group_level_resource, group_level_common_labels, record)

      is_json = false
      if @detect_json
        # Save the following fields if available, then clear them out to
        # allow for determining whether we should parse the log or message
        # field.
        # This list should be in sync with
        # https://cloud.google.com/logging/docs/agent/configuration#special-fields.
        preserved_keys = [
          'time',
          'timeNanos',
          'timestamp',
          'timestampNanos',
          'timestampSeconds',
          'severity',
          @http_request_key,
          @insert_id_key,
          @labels_key,
          @operation_key,
          @source_location_key,
          @span_id_key,
          @trace_key,
          @trace_sampled_key
        ]

        # If the log is json, we want to export it as a structured log
        # unless there is additional metadata that would be lost.
        record_json = nil
        if (record.keys - preserved_keys).length == 1
          # Exactly one non-special field remains. If it is one of the
          # conventional text-payload fields, attempt to parse it as JSON.
          # Note: if more than one of these fields is present, the last
          # match wins (each iteration overwrites record_json).
          %w(log message msg).each do |field|
            if record.key?(field)
              record_json = parse_json_or_nil(record[field])
            end
          end
        end
        unless record_json.nil?
          # Propagate these if necessary. Note that we don't want to
          # override these keys in the JSON we've just parsed.
          preserved_keys.each do |key|
            # The explicit !record_json.key?(key) guard (in addition to
            # ||=) keeps parsed-JSON values that are nil/false from being
            # replaced by the original record's copy.
            record_json[key] ||= record[key] if
              record.key?(key) && !record_json.key?(key)
          end

          # From here on, the structured payload replaces the raw record.
          record = record_json
          is_json = true
        end
      end

      # Timestamp extraction; optionally sanitize out-of-range timestamps
      # against the current wall clock when configured to do so.
      ts_secs, ts_nanos, timestamp = compute_timestamp(record, time)
      ts_secs, ts_nanos = adjust_timestamp_if_invalid(timestamp, Time.now) \
        if @adjust_invalid_timestamps && timestamp

      severity = compute_severity(
        entry_level_resource.type, record, entry_level_common_labels)

      # Labels parsed out of the record payload itself (may be nil).
      dynamic_labels_from_payload = parse_labels(record)

      # NOTE(review): merge! already mutates entry_level_common_labels in
      # place and returns self, so the re-assignment is redundant (but
      # harmless).
      entry_level_common_labels = entry_level_common_labels.merge!(
        dynamic_labels_from_payload) if dynamic_labels_from_payload

      entry = @construct_log_entry.call(entry_level_common_labels,
                                        entry_level_resource,
                                        severity,
                                        ts_secs,
                                        ts_nanos)

      # Pull the special fields out of the record (delete, so they are not
      # duplicated into the payload below) and set them on the entry only
      # when present. trace_sampled uses an explicit nil check because
      # false is a meaningful value for it.
      insert_id = record.delete(@insert_id_key)
      entry.insert_id = insert_id if insert_id
      span_id = record.delete(@span_id_key)
      entry.span_id = span_id if span_id
      trace = record.delete(@trace_key)
      entry.trace = compute_trace(trace) if trace
      trace_sampled = record.delete(@trace_sampled_key)
      entry.trace_sampled = parse_bool(trace_sampled) unless
        trace_sampled.nil?

      # Remaining structured fields, then the payload itself (structured
      # if is_json, otherwise text — per set_payload's handling).
      set_log_entry_fields(record, entry)
      set_payload(entry_level_resource.type, record, entry, is_json)

      entries.push(entry)
    end
    # Don't send an empty request if we rejected all the entries.
    next if entries.empty?

    log_name = "projects/#{@project_id}/logs/#{log_name(
      tag, group_level_resource)}"

    requests_to_send << {
      entries: entries,
      log_name: log_name,
      resource: group_level_resource,
      labels: group_level_common_labels
    }
  end

  if @split_logs_by_tag
    # One write request per (tag, resource) group.
    requests_to_send.each do |request|
      @write_request.call(request)
    end
  else
    # Combine all requests into one. The request level "log_name" will be
    # ported to the entry level. The request level "resource" and "labels"
    # are ignored as they should have been folded into the entry level
    # "resource" and "labels" already anyway.
    combined_entries = []
    requests_to_send.each do |request|
      request[:entries].each do |entry|
        # Modify entries in-place as they are not needed later on.
        entry.log_name = request[:log_name]
      end
      combined_entries.concat(request[:entries])
    end
    @write_request.call(entries: combined_entries) unless
      combined_entries.empty?
  end
end