File: pipeline_schedule_worker.rb

Package: gitlab 17.6.5-19 (Debian, suite: sid)

# frozen_string_literal: true

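# Cron-triggered worker that looks up pipeline schedules due to run and fans
# them out, in staggered batches, to RunPipelineScheduleWorker, holding an
# exclusive lease so overlapping cron ticks cannot schedule the same pipelines
# twice.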
class PipelineScheduleWorker # rubocop:disable Scalability/IdempotentWorker
  include ApplicationWorker

  data_consistency :always

  include CronjobQueue
  include ::Gitlab::ExclusiveLeaseHelpers

  LOCK_RETRY = 3        # attempts to obtain the exclusive lease before giving up
  LOCK_TTL = 5.minutes  # lease expiry, in case a run dies without releasing it
  DELAY = 7.seconds     # stagger applied between consecutive batches
  BATCH_SIZE = 500      # schedules loaded per batch

  feature_category :continuous_integration
  worker_resource_boundary :cpu

  def perform
    in_lock(lock_key, **lock_params) do
      Ci::PipelineSchedule
        .select(:id, :owner_id, :project_id) # Minimize the selected columns
        .runnable_schedules
        .preloaded
        .find_in_batches(batch_size: BATCH_SIZE).with_index do |schedules, index| # rubocop: disable CodeReuse/ActiveRecord -- the cop is triggered by the batch_size option
          enqueue_run_pipeline_schedule_worker(schedules, index)
        end
    end
  end

  private

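  # A single lease, keyed on the underscored class name, is shared by every run
  # of this worker, so only one cron tick can be processing schedules at a time.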
  def lock_key
    self.class.name.underscore
  end

  def lock_params
    {
      ttl: LOCK_TTL,
      retries: LOCK_RETRY
    }
  end

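  # Fans one batch of schedules out to RunPipelineScheduleWorker. Batches are
  # staggered: the first is enqueued after a minimum of 1 second, and each
  # subsequent batch is delayed by a further DELAY per batch index, so pipeline
  # creation is spread out rather than enqueued all at once.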
  def enqueue_run_pipeline_schedule_worker(schedules, index)
    RunPipelineScheduleWorker.bulk_perform_in_with_contexts(
      [1, index * DELAY].max,
      schedules,
      arguments_proc: ->(schedule) { [schedule.id, schedule.owner_id, { scheduling: true }] },
      context_proc: ->(schedule) { { project: schedule.project, user: schedule.owner } }
    )
  end
end
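
For reference, a standalone sketch (not part of the packaged file) of the delay
each batch index receives from [1, index * DELAY].max, assuming only that the
activesupport gem is installed:

# delay_sketch.rb -- hypothetical helper script, not shipped with GitLab
require "active_support/all"

DELAY = 7.seconds # same stagger as the worker above

4.times do |index|
  delay = [1, index * DELAY].max
  puts "batch #{index}: RunPipelineScheduleWorker jobs enqueued after #{delay.to_i}s"
end
# Prints 1s, 7s, 14s and 21s for batches 0 through 3.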