ubicloud/model/github_runner.rb
Jeremy Evans a76c6259d8 Fix 9N+1 query issue when closing an account
For each of the account's projects, this ran 9 separate queries:
1 for the accounts, and 1 for each of the 8 resource associations.

Change this to 8-9 queries total.  Add a first_project_with_resources
dataset method to Project.  This method issues one query per
resource association to find any objects related to a project in
the dataset, using Sequel's filtering by associations support.
If it finds a related object, it returns the project related to the
object.

To handle the filtering for projects with only a single account,
use a subquery that checks that the access_tag table only has one
entry for the project.
2025-06-25 06:52:58 +09:00
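A rough sketch of how such a dataset method could look, relying on Sequel's filtering-by-associations support. The resource model list, the project association on each resource model, and the access_tag.project_id column are assumptions for illustration; only the access_tag table and the method name come from the commit message above.

class Project < Sequel::Model
  dataset_module do
    # Sketch only: the model list below is illustrative, not the exact
    # eight resource associations used upstream.
    def first_project_with_resources
      project_ds = self
      [Vm, PrivateSubnet, PostgresResource, Firewall, LoadBalancer].each do |model|
        # Filtering by associations: one query per resource model to find
        # any object whose project is in the current dataset.
        if (obj = model.first(project: project_ds))
          return obj.project
        end
      end
      nil
    end
  end
end

# Restricting to projects with only a single account, via a subquery that
# keeps project ids having exactly one access_tag row (column name assumed):
single_account_project_ids = DB[:access_tag]
  .group(:project_id)
  .having(Sequel.function(:count).* => 1)
  .select(:project_id)

# In the account-closing path, the dataset would also be limited to the
# closing account's own projects before calling the method:
Project.where(id: single_account_project_ids).first_project_with_resources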


# frozen_string_literal: true

require "net/ssh"

require_relative "../model"

class GithubRunner < Sequel::Model
  one_to_one :strand, key: :id
  many_to_one :installation, key: :installation_id, class: :GithubInstallation
  many_to_one :repository, key: :repository_id, class: :GithubRepository
  one_to_one :vm, key: :id, primary_key: :vm_id
  one_through_one :project, join_table: :github_installation, left_key: :id, left_primary_key: :installation_id, read_only: true

  plugin ResourceMethods, redacted_columns: :workflow_job
  include SemaphoreMethods
  include HealthMonitorMethods
  semaphore :destroy, :skip_deregistration

  dataset_module do
    def total_active_runner_vcpus
      left_join(:strand, id: :id)
        .exclude(Sequel[:strand][:label] => ["start", "wait_concurrency_limit"])
        .select_map(Sequel[:github_runner][:label])
        .sum { Github.runner_labels[it]["vcpus"] }
    end
  end

  def label_data
    @label_data ||= Github.runner_labels[label]
  end

  def repository_url
"http://github.com/#{repository_name}"
  end

  def run_url
    "#{repository_url}/actions/runs/#{workflow_job["run_id"]}"
  end

  def job_url
    "#{run_url}/job/#{workflow_job["id"]}"
  end

  def runner_url
    "#{repository_url}/settings/actions/runners/#{runner_id}" if runner_id
  end

  def log_duration(message, duration)
    values = {ubid:, label:, repository_name:, duration:, conclusion: workflow_job&.dig("conclusion")}
    if vm
      values.merge!(vm_ubid: vm.ubid, arch: vm.arch, cores: vm.cores, vcpus: vm.vcpus)
      if vm.vm_host
        values[:vm_host_ubid] = vm.vm_host.ubid
        values[:data_center] = vm.vm_host.data_center
      end
      values[:vm_pool_ubid] = VmPool[vm.pool_id].ubid if vm.pool_id
    end
    Clog.emit(message) { {message => values} }
  end

  def provision_spare_runner
    Prog::Vm::GithubRunner.assemble(installation, repository_name: repository_name, label: label).subject
  end

  def init_health_monitor_session
    {
      ssh_session: vm.sshable.start_fresh_session
    }
  end

  def check_pulse(session:, previous_pulse:)
    reading = begin
      available_memory = session[:ssh_session].exec!("awk '/MemAvailable/ {print $2}' /proc/meminfo").chomp
      "up"
    rescue
      "down"
    end

    aggregate_readings(previous_pulse: previous_pulse, reading: reading, data: {available_memory: available_memory})
  end
end
# Table: github_runner
# Columns:
#  id              | uuid                     | PRIMARY KEY
#  installation_id | uuid                     |
#  repository_name | text                     | NOT NULL
#  label           | text                     | NOT NULL
#  vm_id           | uuid                     |
#  runner_id       | bigint                   |
#  created_at      | timestamp with time zone | NOT NULL DEFAULT CURRENT_TIMESTAMP
#  ready_at        | timestamp with time zone |
#  workflow_job    | jsonb                    |
#  repository_id   | uuid                     |
#  allocated_at    | timestamp with time zone |
#  billed_vm_size  | text                     |
# Indexes:
#  github_runner_pkey      | PRIMARY KEY btree (id)
#  github_runner_vm_id_key | UNIQUE btree (vm_id)
# Foreign key constraints:
#  github_runner_installation_id_fkey | (installation_id) REFERENCES github_installation(id)
#  github_runner_repository_id_fkey   | (repository_id) REFERENCES github_repository(id)