# frozen_string_literal: true
module Git
  # Runs the hooks triggered by a push to a branch: system/push hooks (via
  # the base class), commit-message processing, Jira Connect syncing, cache
  # invalidation and commit-signature backfill.
  class BranchHooksService < ::Git::BaseHooksService
    include Gitlab::InternalEventsTracking
    extend ::Gitlab::Utils::Override

    # Number of commit SHAs sent per JiraConnect::SyncBranchWorker job.
    JIRA_SYNC_BATCH_SIZE = 20
    # Delay step between consecutive Jira sync batches (batch i waits i * delay).
    JIRA_SYNC_BATCH_DELAY = 10.seconds
# Runs the branch-specific hooks, then the shared push pipeline from the
# base class, and finally schedules signature backfill. Returns whatever
# the base class implementation returns.
def execute
  execute_branch_hooks

  result = super
  enqueue_update_signatures
  result
end
private

# A branch deletion is signalled exactly like any ref deletion, so reuse
# the base class predicate under a branch-specific name.
alias_method :removing_branch?, :removing_ref?
# The hook family this service fires (consumed by the base class).
def hook_name
  :push_hooks
end
# The newest PROCESS_COMMIT_LIMIT commits from the pushed range — the
# subset this service is willing to process in-line.
def limited_commits
  strong_memoize(:limited_commits) do
    threshold_commits.last(PROCESS_COMMIT_LIMIT)
  end
end
# Taking limit+1 commits allows us to detect when the limit is in effect
# (see #commits_count / #track_process_commit_limit_overflow). The source
# of the commit list depends on what kind of branch change this push is.
def threshold_commits
  strong_memoize(:threshold_commits) do
    if creating_default_branch?
      # The most recent PROCESS_COMMIT_LIMIT commits in the default branch.
      # They are returned newest-to-oldest, but we need to present them oldest-to-newest
      project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT + 1).reverse!
    elsif creating_branch?
      # Use the pushed commits that aren't reachable by the default branch
      # as a heuristic. This may include more commits than are actually
      # pushed, but that shouldn't matter because we check for existing
      # cross-references later.
      project.repository.commits_between(project.default_branch, newrev, limit: PROCESS_COMMIT_LIMIT + 1)
    elsif updating_branch?
      # Exactly the range introduced by this push.
      project.repository.commits_between(oldrev, newrev, limit: PROCESS_COMMIT_LIMIT + 1)
    else # removing branch
      []
    end
  end
end
# Total number of commits introduced by this push, NOT capped by
# PROCESS_COMMIT_LIMIT. Reuses the already-loaded commit list when it is
# known to be complete; otherwise asks the repository for an exact count.
def commits_count
  strong_memoize(:commits_count) do
    # If #threshold_commits was already computed and its limit+1 probe shows
    # the list is complete, its size is the answer — no extra repo call.
    next threshold_commits.count if
      strong_memoized?(:threshold_commits) &&
      threshold_commits.count <= PROCESS_COMMIT_LIMIT

    if creating_default_branch?
      project.repository.commit_count_for_ref(ref)
    elsif creating_branch?
      project.repository.count_commits_between(project.default_branch, newrev)
    elsif updating_branch?
      project.repository.count_commits_between(oldrev, newrev)
    else # removing branch
      0
    end
  end
end
override :invalidated_file_types
# For pushes that change the default branch (but not the push that creates
# it), report only the file types actually touched so caches can be expired
# selectively; in every other case defer to the base implementation.
def invalidated_file_types
  if default_branch? && !creating_branch?
    modified_file_types
  else
    super
  end
end
# File types (as classified by Gitlab::FileDetector) touched by the pushed
# commits, derived from the per-commit path sets in #commit_paths.
#
# NOTE: builds the union with the non-destructive Set#| instead of
# reduce(&:merge) — Set#merge mutates its receiver, so the old form
# silently grew the first commit's memoized path Set inside #commit_paths,
# which #commits_changing_ci_config later reads.
def modified_file_types
  paths = commit_paths.values.reduce(Set.new, :|)
  Gitlab::FileDetector.types_in_paths(paths)
end
# Fires the lifecycle hooks matching this push. Note a branch creation also
# runs the "change" hooks — creation is a change from nothing.
def execute_branch_hooks
  project.repository.after_push_commit(branch_name)
  branch_create_hooks if creating_branch?
  branch_change_hooks if creating_branch? || updating_branch?
  branch_remove_hooks if removing_branch?
  track_process_commit_limit_overflow
end
# Repository bookkeeping for a newly created branch, plus the extra
# project-level hook when the new branch is the default one.
def branch_create_hooks
  project.repository.after_create_branch(expire_cache: false)

  return unless default_branch?

  project.after_create_default_branch
end
# Work common to branch creation and branch updates: schedule
# commit-message processing, Jira Connect syncing, and CI-config-change
# analytics.
def branch_change_hooks
  enqueue_process_commit_messages
  enqueue_jira_connect_sync_messages
  track_ci_config_change_event
end
# Work for a deleted branch: notify Jira Connect, then update repository
# state/caches for the removed ref.
def branch_remove_hooks
  enqueue_jira_connect_remove_branches
  project.repository.after_remove_branch(expire_cache: false)
end
# Emits an internal analytics event for every pushed commit that modifies
# the project's CI configuration file. Default branch only.
def track_ci_config_change_event
  return unless default_branch?

  commits_changing_ci_config.each do |ci_commit|
    track_internal_event('commit_change_to_ciconfigfile', user: ci_commit.author, project: ci_commit.project)
  end
end
# Records a metric whenever the push contained more commits than we are
# willing to process in-line (detected via the limit+1 probe in
# #threshold_commits).
def track_process_commit_limit_overflow
  overflowed = threshold_commits.count > PROCESS_COMMIT_LIMIT

  Gitlab::Metrics.add_event(:process_commit_limit_overflow) if overflowed
end
# Schedules processing of commit messages
# (one ProcessCommitWorker per commit whose message looks like it
# references another object).
def enqueue_process_commit_messages
  referencing_commits = limited_commits.select(&:matches_cross_reference_regex?)
  upstream_commit_ids = upstream_commit_ids(referencing_commits)
  referencing_commits.each do |commit|
    # Avoid reprocessing commits that already exist upstream if the project
    # is a fork. This will prevent duplicated/superfluous system notes on
    # mentionables referenced by a commit that is pushed to the upstream,
    # that is then also pushed to forks when these get synced by users.
    next if upstream_commit_ids.include?(commit.id)

    # Jobs are staggered via the shared worker pool delay (see
    # #process_commit_worker_delay).
    ProcessCommitWorker.perform_in(
      process_commit_worker_delay,
      project.id,
      current_user.id,
      commit.to_hash,
      default_branch?
    )
  end
end
# Enqueues Jira Connect sync jobs for this push. The branch is synced when
# its name contains a Jira issue key; commits are synced when their
# messages do. Nothing is enqueued if neither matches.
def enqueue_jira_connect_sync_messages
  return unless project.jira_subscription_exists?

  branch_to_sync = branch_name if Atlassian::JiraIssueKeyExtractors::Branch.has_keys?(project, branch_name)
  commits_to_sync = filtered_commit_shas

  return if branch_to_sync.nil? && commits_to_sync.empty?

  if commits_to_sync.any?
    # Commits are sent in batches, with each batch i delayed by i * the
    # batch delay, so jobs are spread out over time.
    commits_to_sync.each_slice(JIRA_SYNC_BATCH_SIZE).with_index do |commits, i|
      JiraConnect::SyncBranchWorker.perform_in(
        JIRA_SYNC_BATCH_DELAY * i,
        project.id,
        branch_to_sync,
        commits,
        Atlassian::JiraConnect::Client.generate_update_sequence_id
      )
    end
  else
    # No commit references a Jira issue, but the branch name does — sync
    # just the branch immediately.
    JiraConnect::SyncBranchWorker.perform_async(
      project.id,
      branch_to_sync,
      commits_to_sync,
      Atlassian::JiraConnect::Client.generate_update_sequence_id
    )
  end
end
# Notifies Jira Connect that a branch whose name references a Jira issue
# has been deleted.
def enqueue_jira_connect_remove_branches
  return unless project.jira_subscription_exists?
  return unless Atlassian::JiraIssueKeyExtractors::Branch.has_keys?(project, branch_name)

  Integrations::JiraConnect::RemoveBranchWorker.perform_async(project.id, { branch_name: branch_name })
end
# SHAs of the processed commits whose messages contain a Jira issue key.
def filtered_commit_shas
  limited_commits.filter_map do |commit|
    commit.sha if Atlassian::JiraIssueKeyExtractor.has_keys?(commit.safe_message)
  end
end
# The commit-signature models that can hold a signature record for a
# commit. Used to decide which commits still need signature extraction.
def signature_types
  [
    ::CommitSignatures::GpgSignature,
    ::CommitSignatures::X509CommitSignature,
    ::CommitSignatures::SshSignature
  ]
end
# SHAs from +commits+ that have no signature record of ANY type yet.
# Each signature model reports the SHAs it has no row for; intersecting
# those lists keeps only the SHAs unknown to every model.
def unsigned_commit_shas(commits)
  shas = commits.map(&:sha)

  per_type = signature_types.map { |type| type.unsigned_commit_shas(shas) }
  per_type.reduce { |memo, unsigned| memo & unsigned }
end
# Backfills signature records: find pushed commits with no signature row
# yet, narrow to those that actually carry a signature, and enqueue a
# worker to persist them.
def enqueue_update_signatures
  unsigned = unsigned_commit_shas(limited_commits)
  return if unsigned.empty?

  signable = Gitlab::Git::Commit.shas_with_signatures(project.repository, unsigned)
  CreateCommitSignatureWorker.perform_async(signable, project.id) unless signable.empty?
end
# It's not sufficient to just check for a blank SHA as it's possible for the
# branch to be pushed, but for the `post-receive` hook to never run:
# https://gitlab.com/gitlab-org/gitlab-foss/issues/59257
def creating_branch?
  strong_memoize(:creating_branch) do
    Gitlab::Git.blank_ref?(oldrev) || !project.repository.branch_exists?(branch_name)
  end
end
# A push that neither creates nor removes the branch is an update.
def updating_branch?
  !(creating_branch? || removing_branch?)
end
# True for the push that brings the default branch into existence
# (evaluates #creating_branch? first, matching the original short-circuit).
def creating_default_branch?
  return false unless creating_branch?

  default_branch?
end
# True when this push targets the project's default branch, or when the
# project has no default branch at all (project.default_branch is nil).
def default_branch?
  strong_memoize(:default_branch) do
    default = project.default_branch
    default.nil? || default == branch_name
  end
end
# Short branch name extracted from the pushed ref.
def branch_name
  strong_memoize(:branch_name) do
    Gitlab::Git.ref_name(ref)
  end
end
# IDs of the given commits that already exist in the fork source (empty
# set when the project is not a fork). Used to skip reprocessing commits
# that were already handled upstream.
def upstream_commit_ids(commits)
  upstream = project.fork_source
  return Set.new unless upstream

  upstream.commits_by(oids: commits.map(&:id)).map(&:id).to_set
end
# Non-merge commits in this push that touch the project's CI configuration
# file.
def commits_changing_ci_config
  non_merge = commit_paths.reject { |commit, _paths| commit.merge_commit? }

  non_merge.select { |_commit, paths| paths.include?(project.ci_config_path_or_default) }.keys
end
# Maps each processed commit to the set of paths its diff touches
# (new_path of every raw delta).
def commit_paths
  strong_memoize(:commit_paths) do
    limited_commits.each_with_object({}) do |commit, paths_by_commit|
      paths_by_commit[commit] = commit.raw_deltas.map(&:new_path).to_set
    end
  end
end
# Delay (in seconds) for the next ProcessCommitWorker job, taken from the
# shared worker pool when one was passed in via params; 0 otherwise.
def process_commit_worker_delay
  pool = params[:process_commit_worker_pool]

  (pool && pool.get_and_increment_delay) || 0
end
end
end

# Hook point for EE/JH editions to extend this class via prepend —
# presumably GitLab's standard prepend_mod extension convention; no-op in CE.
Git::BranchHooksService.prepend_mod_with('Git::BranchHooksService')