Class: Datadog::CI::Git::TreeUploader

Inherits:
Object
Defined in:
lib/datadog/ci/git/tree_uploader.rb

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(api:, force_unshallow: false) ⇒ TreeUploader

Returns a new instance of TreeUploader.



# File 'lib/datadog/ci/git/tree_uploader.rb', line 20

def initialize(api:, force_unshallow: false)
  @api = api
  @force_unshallow = force_unshallow
end
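
For illustration, a minimal construction sketch; api_client below is a hypothetical placeholder for an already-configured Datadog CI API client, not something provided by this class:

uploader = Datadog::CI::Git::TreeUploader.new(
  api: api_client,       # hypothetical, already-configured API client
  force_unshallow: true  # opt in to unshallowing even when the backend knows all commits
)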

Instance Attribute Details

#api ⇒ Object (readonly)

Returns the value of attribute api.



# File 'lib/datadog/ci/git/tree_uploader.rb', line 18

def api
  @api
end

#force_unshallow ⇒ Object (readonly)

Returns the value of attribute force_unshallow.



# File 'lib/datadog/ci/git/tree_uploader.rb', line 18

def force_unshallow
  @force_unshallow
end
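
Both readers simply return the values passed to the constructor; continuing the construction sketch above:

uploader.api              # => the API client supplied at construction
uploader.force_unshallow  # => true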

Instance Method Details

#call(repository_url) ⇒ Object



# File 'lib/datadog/ci/git/tree_uploader.rb', line 25

def call(repository_url)
  if api.nil?
    Datadog.logger.debug("API is not configured, aborting git upload")
    return
  end

  if test_visibility_component.client_process?
    Datadog.logger.debug("Test visibility component is running in client process, aborting git upload")
    return
  end

  Datadog.logger.debug { "Uploading git tree for repository #{repository_url}" }

  latest_commits = LocalRepository.git_commits
  head_commit = latest_commits&.first
  if head_commit.nil?
    Datadog.logger.debug("Got empty latest commits list, aborting git upload")
    return
  end

  begin
    # ask the backend for the list of commits it already has
    known_commits, new_commits = fetch_known_commits_and_split(repository_url, latest_commits)
    # if all commits are present in the backend, we don't need to upload anything

    # We optimize unshallowing process by checking the latest available commits with backend:
    # if they are already known to backend, then we don't have to unshallow.
    #
    # Sometimes we need to unshallow anyway: for impacted tests detection feature for example we need
    # to calculate git diffs locally. In this case we skip the optimization and always unshallow.
    if new_commits.empty? && !@force_unshallow
      Datadog.logger.debug("No new commits to upload")
      return
    end

    # quite often we deal with shallow clones in CI environment
    if LocalRepository.git_shallow_clone? && LocalRepository.git_unshallow
      Datadog.logger.debug("Detected shallow clone and unshallowed the repository, repeating commits search")

      # re-run the search with the updated commit list after unshallowing
      known_commits, new_commits = fetch_known_commits_and_split(
        repository_url,
        LocalRepository.git_commits
      )
    end
  rescue SearchCommits::ApiError => e
    Datadog.logger.debug("SearchCommits failed with #{e}, aborting git upload")
    return
  end

  Datadog.logger.debug { "Uploading packfiles for commits: #{new_commits}" }
  uploader = UploadPackfile.new(
    api: api,
    head_commit_sha: head_commit,
    repository_url: repository_url
  )
  packfiles_count = 0
  Packfiles.generate(included_commits: new_commits, excluded_commits: known_commits) do |filepath|
    packfiles_count += 1
    uploader.call(filepath: filepath)
  rescue UploadPackfile::ApiError => e
    Datadog.logger.debug("Packfile upload failed with #{e}")
    break
  end

  Utils::Telemetry.distribution(Ext::Telemetry::METRIC_GIT_REQUESTS_OBJECT_PACK_FILES, packfiles_count.to_f)
ensure
  Datadog.logger.debug("Git tree upload finished")
end
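
As a hedged usage sketch, #call takes the repository URL of the current checkout; the URL below is illustrative only:

# The repository URL would normally be discovered from the local git remote;
# this value is purely illustrative.
uploader.call("git@github.com:example/repo.git")

The method returns early (logging a debug message) when no API client is configured, when the test visibility component runs in a client process, when no local commits are found, when the backend already knows every local commit and unshallowing is not forced, or when a SearchCommits request fails.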