require 'yaml'
require 'json'
require 'tmpdir'
require 'etc'
require 'chake/config'
require 'chake/version'
require 'chake/readline'
require 'chake/wipe'
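# Typical invocations of the tasks defined below (HOST and CMD are
# placeholders for a configured hostname and a shell command):
#   rake nodes            # list all configured nodes
#   rake converge         # converge every node (the default task)
#   rake converge:HOST    # converge a single node
#   rake run[CMD]         # run a command on every node
#   rake login:HOST       # open an interactive shell on a node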
desc 'Initializes current directory with sample structure'
task init: 'init:itamae'
Chake::ConfigManager.all.each do |cfgmgr|
desc "Initializes current directory for #{cfgmgr.short_name}"
task "init:#{cfgmgr.short_name}" do
cfgmgr.init
end
end
desc 'list nodes'
task :nodes do
fields = %i[hostname connection config_manager]
lengths = fields.map do |f|
[f.length, Chake.nodes.map { |n| n.send(f).to_s.length }.max].max
end
columns = lengths.map { |l| "%-#{l}s" }.join(' ')
puts(columns % fields)
puts(columns % lengths.map { |l| '-' * l })
Chake.nodes.each do |node|
puts(columns % fields.map { |f| node.send(f) })
end
end
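# Maps each encrypted file (*.asc/*.gpg) to the path it decrypts to (the same
# name minus the extension). Uses the node's explicit 'encrypted' list when
# present, otherwise globs **/files/ for default, host-specific and top-level
# encrypted files.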
def encrypted_for(node)
encrypted_files = Array(node.data['encrypted'])
if encrypted_files.empty?
encrypted_files = Dir.glob("**/files/{default,host-#{node.hostname}}/*.{asc,gpg}") + Dir.glob('**/files/*.{asc,gpg}')
end
encrypted_files.each_with_object({}) do |key, hash|
hash[key] = key.sub(/\.(asc|gpg)$/, '')
end
end
desc 'list encrypted files per node'
task :encrypted do
Chake.nodes.each do |node|
puts "#{node.hostname}: #{Array(encrypted_for(node).keys).join(', ')}"
end
end
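# Runs the given block with decrypted copies of the node's encrypted files in
# place. Nodes that get an upload receive decrypted files during the upload
# task, so the block is run as-is; for the others the files are decrypted
# locally with gpg and wiped again after the block finishes.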
def maybe_decrypt(node)
if node.needs_upload?
return yield
end
files = encrypted_for(node)
files.each do |encrypted, target|
sh "gpg --use-agent --quiet --decrypt --output #{target} #{encrypted}"
end
begin
yield
ensure
files.each do |_, target|
Chake::Wipe.instance.wipe(target)
end
end
end
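# Yields only when the sha1sum of the given files (plus the node's bootstrap
# script) differs from the hash recorded on the previous run; the new hash is
# then stored under Chake.tmpdir for the next comparison.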
def if_files_changed(node, group_name, files)
return if files.empty?
hash_io = IO.popen(%w[xargs sha1sum], 'w+')
hash_io.puts(File.join(Chake.tmpdir, "#{node}.bootstrap"))
files.sort.each { |f| hash_io.puts(f) }
hash_io.close_write
current_hash = hash_io.read
hash_file = File.join(Chake.tmpdir, "#{node}.#{group_name}.sha1sum")
hash_on_disk = nil
hash_on_disk = File.read(hash_file) if File.exist?(hash_file)
yield if current_hash != hash_on_disk
FileUtils.mkdir_p(File.dirname(hash_file))
File.write(hash_file, current_hash)
end
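# Writes the node attributes as pretty-printed JSON, keeping the file readable
# only by the owner (mode 0600) since the node data may contain secrets.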
def write_json_file(file, data)
File.chmod(0o600, file) if File.exist?(file)
File.open(file, 'w', 0o600) do |f|
f.write(JSON.pretty_generate(data))
f.write("\n")
end
end
desc 'Executed before bootstrapping'
task bootstrap_common: :connect_common
desc 'Executed before uploading'
task upload_common: :connect_common
desc 'Executed before converging'
task converge_common: :connect_common
desc 'Executed before connecting to any host'
task :connect_common
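# Per-node tasks: for every configured node, define bootstrap:HOST,
# upload:HOST, converge:HOST, preview:HOST, apply:HOST, run:HOST, login:HOST,
# check:HOST and connect:HOST.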
Chake.nodes.each do |node|
node.silent = Rake.application.options.silent
hostname = node.hostname
bootstrap_script = File.join(Chake.tmpdir, "#{hostname}.bootstrap")
bootstrap_steps = node.bootstrap_steps
bootstrap_code = (["#!/bin/sh\n", "set -eu\n"] + bootstrap_steps.map { |f| File.read(f) }).join
desc "bootstrap #{hostname}"
task "bootstrap:#{hostname}" => :bootstrap_common do
mkdir_p Chake.tmpdir unless File.directory?(Chake.tmpdir)
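# (re)run the bootstrap script only when its generated contents changed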
if !File.exist?(bootstrap_script) || File.read(bootstrap_script) != bootstrap_code
# create bootstrap script
File.write(bootstrap_script, bootstrap_code)
chmod 0o755, bootstrap_script
# copy bootstrap script over
scp = node.scp
target = "/tmp/.chake-bootstrap.#{Etc.getpwuid.name}"
sh(*scp, bootstrap_script, node.scp_dest + target)
# run bootstrap script
node.run_as_root("#{target} #{hostname}")
end
# overwrite config with current contents
config = File.join(Chake.tmpdir, "#{hostname}.json")
write_json_file(config, node.data)
end
desc "upload data to #{hostname}"
task "upload:#{hostname}" => ["bootstrap:#{hostname}", :upload_common] do
next unless node.needs_upload?
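# exclude encrypted files (and their decrypted names) from the plain rsync;
# they are decrypted and shipped separately below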
encrypted = encrypted_for(node)
rsync_excludes = (encrypted.values + encrypted.keys).map { |f| ['--exclude', f] }.flatten
rsync_excludes << '--exclude' << '.git/'
rsync_excludes << '--exclude' << 'cache/'
rsync_excludes << '--exclude' << 'nodes/'
rsync_excludes << '--exclude' << 'local-mode-cache/'
rsync = node.rsync + ['-ap'] + ENV.fetch('CHAKE_RSYNC_OPTIONS', '').split
rsync_logging = (Rake.application.options.trace && '--verbose') || '--quiet'
hash_files = Dir.glob(File.join(Chake.tmpdir, '*.sha1sum'))
files = Dir.glob('**/*').reject { |f| File.directory?(f) } - encrypted.keys - encrypted.values - hash_files
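# sync the regular (unencrypted) tree, but only if something changed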
if_files_changed(hostname, 'plain', files) do
sh(*rsync, '--delete', rsync_logging, *rsync_excludes, './', node.rsync_dest)
end
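# decrypt the encrypted files into a temporary directory and rsync that over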
if_files_changed(hostname, 'enc', encrypted.keys) do
Dir.mktmpdir do |tmpdir|
encrypted.each do |encrypted_file, target_file|
target = File.join(tmpdir, target_file)
mkdir_p(File.dirname(target))
rm_f target
File.open(target, 'w', 0o400) do |output|
IO.popen(['gpg', '--quiet', '--batch', '--use-agent', '--decrypt', encrypted_file]) do |data|
output.write(data.read)
end
end
puts "#{target} (decrypted)"
end
sh(*rsync, rsync_logging, "#{tmpdir}/", node.rsync_dest)
end
end
end
converge_dependencies = [:converge_common, "bootstrap:#{hostname}", "upload:#{hostname}"]
desc "converge #{hostname}"
task "converge:#{hostname}" => converge_dependencies do
maybe_decrypt(node) do
node.converge
end
end
desc "Preview changes when converging #{hostname}"
task "preview:#{hostname}" => converge_dependencies do
maybe_decrypt(node) do
node.preview
end
end
desc "apply <recipe> on #{hostname}"
task "apply:#{hostname}", [:recipe] => [:recipe_input, :connect_common, "connect:#{hostname}"] do |_task, _args|
maybe_decrypt(node) do
node.apply($recipe_to_apply)
end
end
task "apply:#{hostname}" => converge_dependencies
desc "run a command on #{hostname}"
task "run:#{hostname}", [:command] => [:run_input, :connect_common, "connect:#{hostname}"] do
node.run($cmd_to_run)
end
desc "Logs in to a shell on #{hostname}"
task "login:#{hostname}" => [:connect_common, "connect:#{hostname}"] do
node.run_shell
end
desc "checks connectivity and setup on #{hostname}"
task "check:#{hostname}" => [:connect_common, "connect:#{hostname}"] do
node.run('sudo echo OK')
end
# needs to be overridden in user projects
task "connect:#{hostname}"
end
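# Prerequisite task that obtains the command for run:* tasks, either from the
# task argument or interactively, and aborts when none is given.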
task :run_input, :command do |_task, args|
$cmd_to_run = args[:command]
unless $cmd_to_run
puts '# Enter command to run (use arrow keys for history):'
$cmd_to_run = Chake::Readline::Commands.readline
end
if !$cmd_to_run || $cmd_to_run.strip == ''
puts
puts 'I: no command provided, operation aborted.'
exit(1)
end
end
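# Prerequisite task that obtains the recipe for apply:* tasks: uses the task
# argument if given, otherwise lists the available recipes and prompts for one.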
task :recipe_input, :recipe do |_task, args|
$recipe_to_apply = args[:recipe]
unless $recipe_to_apply
recipes = Dir['**/*/recipes/*.rb'].map do |f|
f =~ %r{(.*/)?(.*)/recipes/(.*)\.rb$}
cookbook = Regexp.last_match(2)
recipe = Regexp.last_match(3)
recipe = nil if recipe == 'default'
[cookbook, recipe].compact.join('::')
end.sort
puts 'Available recipes:'
IO.popen('column', 'w') do |column|
column.puts(recipes)
end
$recipe_to_apply = Chake::Readline::Recipes.readline
if !$recipe_to_apply || $recipe_to_apply.empty?
puts
puts 'I: no recipe provided, operation aborted.'
exit(1)
end
unless recipes.include?($recipe_to_apply)
abort "E: no such recipe: #{$recipe_to_apply}"
end
end
end
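# Aggregate tasks: each multitask below fans out to the corresponding per-node
# task and lets Rake run them in parallel.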
desc 'upload to all nodes'
multitask upload: Chake.nodes.map { |node| "upload:#{node.hostname}" }
desc 'bootstrap all nodes'
multitask bootstrap: Chake.nodes.map { |node| "bootstrap:#{node.hostname}" }
desc 'converge all nodes (default)'
multitask 'converge' => Chake.nodes.map { |node| "converge:#{node.hostname}" }
desc 'Preview changes when converging all nodes'
multitask 'preview' => Chake.nodes.map { |node| "preview:#{node.hostname}" }
desc 'Apply <recipe> on all nodes'
multitask 'apply', [:recipe] => Chake.nodes.map { |node| "apply:#{node.hostname}" }
desc 'run <command> on all nodes'
multitask :run, [:command] => Chake.nodes.map { |node| "run:#{node.hostname}" }
task default: :converge
desc 'checks connectivity and setup on all nodes'
multitask check: (Chake.nodes.map { |node| "check:#{node.hostname}" }) do
puts '✓ all hosts OK'
puts ' - ssh connection works'
puts ' - password-less sudo works'
end
desc 'runs a Ruby console in the chake environment'
task :console do
require 'irb'
IRB.setup('__FILE__', argv: [])
workspace = IRB::WorkSpace.new(self)
puts 'chake - interactive console'
puts '---------------------------'
puts 'all node data is available in Chake.nodes'
puts
IRB::Irb.new(workspace).run(IRB.conf)
end