This commit is contained in:
amartynov 2014-10-22 15:01:55 +04:00
parent f581b1ff63
commit b3273a3ffe
17 changed files with 619 additions and 220 deletions

View File

@ -1,31 +1,61 @@
require "commands/knife_commands"
require "commands/ssh"
module DeployCommands
# Returns a lambda(out, s, mongo, tags) that runs chef-client on server `s`,
# streaming progress into `out`.
# When `tags` is non-empty, the server's current chef tags are temporarily
# replaced with `tags` for the run and restored afterwards.
# Return codes: result of deploy_server on success path, 3 when the temporary
# tags cannot be applied, 4 on IOError (e.g. the output stream went away).
def deploy_server_proc
lambda do |out, s, mongo, tags|
begin
old_tags_str = nil
new_tags_str = nil
unless tags.empty?
# Remember and remove the node's current tags, then apply the requested ones.
old_tags_str = KnifeCommands.tags_list(s.chef_node_name).join(" ")
out << "Server tags: #{old_tags_str}\n"
KnifeCommands.tags_delete(s.chef_node_name, old_tags_str)
new_tags_str = tags.join(" ")
out << "Server new tags: #{new_tags_str}\n"
cmd = KnifeCommands.tags_create(s.chef_node_name, new_tags_str)
# KnifeCommands returns [output, success]; bail out if tagging failed.
unless cmd[1]
m = "Error: Cannot add tags '#{new_tags_str}' to server '#{s.chef_node_name}'"
logger.error(m)
out << m + "\n"
return 3
end
logger.info("Set tags for '#{s.chef_node_name}': #{new_tags_str}")
end
# Run chef-client using the ssh key referenced by the server record.
k = mongo.key s.key
r = deploy_server out, s, k.path
unless tags.empty?
# Restore the original tag set regardless of the deploy result.
out << "Restore tags\n"
cmd = KnifeCommands.tags_delete(s.chef_node_name, new_tags_str)
logger.info("Deleted tags for #{s.chef_node_name}: #{new_tags_str}")
cmd = KnifeCommands.tags_create(s.chef_node_name, old_tags_str)
logger.info("Set tags for #{s.chef_node_name}: #{old_tags_str}")
end
return r
rescue IOError => e
# Writing to `out` failed (client disconnected from the stream).
logger.error e.message
return 4
end
end
end
def deploy_server out, server, cert_path
out << "\nRun chef-client on '#{server.chef_node_name}'"
cmd = (server.remote_user == "root" ? "chef-client" : "sudo chef-client")
out << "\nRun chef-client on '#{server.chef_node_name}'\n"
cmd = "chef-client"
ip = if server.public_ip.nil?
server.private_ip
else
out << "Public IP detected\n"
server.public_ip
end
cmd = "ssh -t -i #{cert_path} #{server.remote_user}@#{ip} \"#{cmd}\""
out << "\nCommand: '#{cmd}'\n"
status = nil
IO.popen(cmd + " 2>&1") do |c|
buf = ""
while line = c.gets do
out << line
buf = line
end
c.close
status = $?.to_i
r = buf.scan(/exit\scode\s([0-9]{1,3})/)[0]
unless r.nil?
status = r[0].to_i
end
end
return status
out.flush if out.respond_to?(:flush)
lline = KnifeCommands.ssh_stream(out, cmd, ip, server.remote_user, cert_path)
r = /Chef\sClient\sfinished/i
return (lline[r].nil? ? 1 : 0)
end
end

View File

@ -70,4 +70,29 @@ EOH
return o, $?.success?
end
# Builds the argument vector for `knife ssh`: manual node list (-m), explicit
# ssh user (-x) and identity file (-i), host-key verification disabled.
# The remote command is single-quoted; non-root users get it wrapped in sudo.
def self.ssh_options cmd, host, user, cert
  remote_cmd = user == "root" ? cmd : "sudo #{cmd}"
  options = ["-m", "-x", user, "-i", cert, "--no-host-key-verify", host]
  options << "'#{remote_cmd}'"
  options
end
# Runs `cmd` on `host` via `knife ssh`, streaming combined stdout/stderr into
# `out` (flushing after every line so clients see live progress).
# Returns the last line of output, or nil when there was none, so callers can
# scan it for a success marker (e.g. /Chef Client finished/).
def self.ssh_stream out, cmd, host, user, cert
  knife_cmd = "knife ssh -c #{get_config()} #{ssh_options(cmd, host, user, cert).join(" ")}"
  out << "\nExecuting '#{knife_cmd}' \n\n"
  out.flush if out.respond_to?(:flush)
  # Must be declared before the popen block: a variable first assigned inside
  # the block is block-local, so the final `return lline` would otherwise
  # raise NameError.
  lline = nil
  IO.popen(knife_cmd + " 2>&1") do |o|
    while line = o.gets do
      out << line
      lline = line
      out.flush if out.respond_to?(:flush)
    end
    o.close
  end
  return lline
end
private
# Absolute path to the knife configuration file used by every knife
# invocation (~/.chef/knife.rb).
# NOTE(review): assumes ENV["HOME"] is set — confirm for daemonized runs.
def self.get_config
File.join(ENV["HOME"], ".chef", "knife.rb")
end
end

View File

@ -3,6 +3,68 @@ require "db/exceptions/record_not_found"
module ServerCommands
# Returns a lambda(out, s, provider, mongo) that creates server `s` at the
# cloud provider, bootstraps it with the provider run list, verifies it, then
# applies the project run list via chef-client — streaming progress to `out`.
# Return codes: 0 on success, 3 when provider creation fails, 5 when the
# post-bootstrap check fails or on IOError, otherwise the bootstrap/deploy
# exit status. Failed servers are rolled back and removed from Mongo.
def create_server_proc
lambda do |out, s, provider, mongo|
begin
out << "Create server...\n"
out.flush if out.respond_to?(:flush)
unless provider.create_server(s, out)
return 3
end
mongo.server_insert s
out.flush if out.respond_to?(:flush)
logger.info "Server with parameters: #{s.to_hash.inspect} is running"
key = mongo.key(s.key)
s.chef_node_name = provider.create_default_chef_node_name(s) if s.chef_node_name.nil?
out << "\n\nBootstrap..."
out.flush if out.respond_to?(:flush)
# Bootstrap with the provider's base run list first; the project run list
# is appended after the node is verified.
run_list = s.options[:run_list]
s.options[:run_list] = provider.run_list
out << "\nBootstrap with provider run list: #{s.options[:run_list].inspect}"
status = bootstrap(s, out, key.path, logger)
out.flush if out.respond_to?(:flush)
if status == 0
mongo.server_set_chef_node_name s
logger.info "Server with id '#{s.id}' is bootstraped"
if check_server(s)
out << "Server #{s.chef_node_name} is created"
else
# Node did not pass verification: destroy it and drop the record.
out << roll_back(s, provider)
mongo.server_delete s.id
return 5
end
out << "\n"
out.flush if out.respond_to?(:flush)
# Append the project's own run list and converge again.
out << "\nAdd project run list: #{run_list.inspect}"
s.options[:run_list] += run_list
KnifeCommands.set_run_list(s.chef_node_name, s.options[:run_list])
status = deploy_server(out, s, key.path)
if status != 0
msg = "Failed on chef-client with project run list, server with id '#{s.id}'"
logger.error msg
out << "\n" + msg + "\n"
mongo.server_delete s.id
end
return status
else
msg = "Failed while bootstraping server with id '#{s.id}'"
logger.error msg
out << "\n" + msg + "\n"
out << roll_back(s, provider)
mongo.server_delete s.id
status
end
rescue IOError => e
# Output stream went away: log, roll back the instance, drop the record.
logger.error e.message
logger.warn roll_back(s, provider)
mongo.server_delete s.id
return 5
end
end
end
def extract_servers provider, project, env, params, user, mongo
flavors = provider.flavors
projects = {}
@ -112,35 +174,25 @@ module ServerCommands
out << "\nPublic IP is present\n"
end
out << "\nWaiting for SSH..."
out.flush if out.respond_to?(:flush)
i = 0
cmd = "ssh -i #{cert_path} -q #{s.remote_user}@#{ip} 'exit' 2>&1"
begin
sleep(1)
`ssh -i #{cert_path} -q #{s.remote_user}@#{ip} exit`
sleep(5)
res = `#{cmd}`
i += 1
if i == 300
res = `ssh -i #{cert_path} #{s.remote_user}@#{ip} "exit" 2>&1`
if i == 120
out << "\nCan not connect to #{s.remote_user}@#{ip}"
out << "\n" + res
logger.error "Can not connect with command 'ssh -i #{cert_path} #{s.remote_user}@#{ip}':\n#{res}"
return false
end
raise unless $?.success?
rescue
raise ArgumentError.new("Can not connect with command '#{cmd}' ") unless $?.success?
rescue ArgumentError => e
retry
end
bootstrap_cmd = "knife bootstrap #{bootstrap_options.join(" ")} #{ip}"
out << "\nExecuting '#{bootstrap_cmd}' \n\n"
status = nil
IO.popen(bootstrap_cmd + " 2>&1") do |bo|
while line = bo.gets do
out << line
end
bo.close
status = $?.to_i
out << "\nBootstrap exit status: #{status}\n"
end
return status
return KnifeCommands.knife_bootstrap(out, ip, bootstrap_options)
end
def self.unbootstrap s, cert_path
@ -159,9 +211,13 @@ module ServerCommands
end
def delete_server s, mongo, logger
if s.chef_node_name.nil?
if s.static?
if !s.chef_node_name.nil?
cert = BaseRoutes.mongo.key s.key
ServerCommands.unbootstrap(s, cert.path)
end
mongo.server_delete s.id
msg = "Added server '#{s.id}' is removed"
msg = "Static server '#{s.id}' is removed"
logger.info msg
return msg, nil
end

View File

@ -2,10 +2,12 @@
# `bundle exec thin -R $devops_home/config.ru -e $env -d -p $port -t 600 -u $user --pid $pid_file --log $log_file start`
require 'rubygems'
require 'bundler/setup'
require "sidekiq/web"
root = File.dirname(__FILE__)
require File.join(root, "devops-service")
require File.join(root, "client")
require File.join(root, "report")
# Read configuration file
config_file = File.join(root, "config.rb")
@ -16,8 +18,24 @@ else
raise "No config file '#{config_file}' found"
end
config[:devops_home] = File.join(ENV["HOME"], ".devops")
puts "Devops home: #{config[:devops_home]}"
unless File.exists?(config[:devops_home])
FileUtils.mkdir config[:devops_home]
puts "Directory '#{config[:devops_home]}' has been created"
end
config[:server_report_dir_v2] = File.expand_path(File.join(config[:devops_home], "report", "v2", "server")) unless config[:server_report_dir_v2]
config[:deploy_report_dir_v2] = File.expand_path(File.join(config[:devops_home], "report", "v2", "deploy")) unless config[:deploy_report_dir_v2]
config[:project_report_dir_v2] = File.expand_path(File.join(config[:devops_home], "report", "v2", "project")) unless config[:project_report_dir_v2]
[
:server_report_dir_v2,
:deploy_report_dir_v2,
:project_report_dir_v2
].each {|key| d = config[key]; FileUtils.mkdir_p(d) unless File.exists?(d) }
# URL map for API v2.0
run Rack::URLMap.new({
"/v2.0" => DevopsService.new(config),
"/client" => Client.new(config)
"#{config[:url_prefix]}/v2.0" => DevopsService.new(config),
"#{config[:url_prefix]}/client" => Client.new(config),
"#{config[:url_prefix]}/sidekiq" => Sidekiq::Web
})

View File

@ -84,4 +84,8 @@ class Server < MongoModel
str
end
# True when this server record is a pre-registered ("static") host.
# A nil flag is normalized to false; any other stored value is returned as-is.
def static?
  flag = self.static
  flag.nil? ? false : flag
end
end

View File

@ -248,6 +248,10 @@ class MongoConnector
@servers.update({"_id" => server.id}, server.to_hash_without_id)
end
# Persists only the chef_node_name field of `server` using a $set update,
# leaving the rest of the stored document untouched.
def server_set_chef_node_name server
@servers.update({"_id" => server.id}, {"$set" => {"chef_node_name" => server.chef_node_name}})
end
def keys
@keys.find(create_query).to_a.map {|bi| Key.create_from_bson bi}
end

View File

@ -11,6 +11,7 @@ require "routes/v2.0/provider"
require "routes/v2.0/tag"
require "routes/v2.0/server"
require "routes/v2.0/script"
require "routes/v2.0/status"
require "routes/v2.0/bootstrap_templates"
module Version2_0
@ -20,7 +21,7 @@ module Version2_0
def initialize app
stack = Rack::Builder.new
[FlavorRoutes, ImageRoutes, FilterRoutes, NetworkRoutes, GroupRoutes, DeployRoutes,
ProjectRoutes, KeyRoutes, UserRoutes, ProviderRoutes, TagRoutes, ServerRoutes, ScriptRoutes, BootstrapTemplatesRoutes].each do |m|
ProjectRoutes, KeyRoutes, UserRoutes, ProviderRoutes, TagRoutes, ServerRoutes, ScriptRoutes, BootstrapTemplatesRoutes, StatusRoutes].each do |m|
stack.use m
end
stack.run app

View File

@ -3,6 +3,7 @@ require "routes/v2.0/base_routes"
require "providers/provider_factory"
require "commands/deploy"
require "commands/status"
require "workers/deploy_worker"
module Version2_0
class DeployRoutes < BaseRoutes
@ -28,7 +29,8 @@ module Version2_0
# - body :
# {
# "names": [], -> array of servers names to run chef-client
# "tags": [] -> array of tags to apply on each server before running chef-client
# "tags": [], -> array of tags to apply on each server before running chef-client
# "trace": true -> return output in stream
# }
#
# * *Returns* : text stream
@ -39,63 +41,47 @@ module Version2_0
names = check_array(r["names"], "Parameter 'names' should be a not empty array of strings")
tags = check_array(r["tags"], "Parameter 'tags' should be an array of strings", String, true) || []
servers = BaseRoutes.mongo.servers_by_names(names)
servers.delete_if{|s| s.reserved_by.nil?}
servers = BaseRoutes.mongo.servers(nil, nil, names, true)
halt(404, "No reserved servers found for names '#{names.join("', '")}'") if servers.empty?
keys = {}
servers.sort_by!{|s| names.index(s.chef_node_name)}
if r.key?("trace")
stream() do |out|
status = []
begin
servers.each do |s|
begin
begin
project = begin
BaseRoutes.mongo.check_project_auth s.project, s.deploy_env, request.env['REMOTE_USER']
rescue InvalidPrivileges, RecordNotFound => e
out << e.message + "\n"
status.push 2
next
end
old_tags_str = nil
new_tags_str = nil
unless tags.empty?
old_tags_str = KnifeCommands.tags_list(s.chef_node_name).join(" ")
out << "Server tags: #{old_tags_str}\n"
KnifeCommands.tags_delete(s.chef_node_name, old_tags_str)
new_tags_str = tags.join(" ")
out << "Server new tags: #{new_tags_str}\n"
cmd = KnifeCommands.tags_create(s.chef_node_name, new_tags_str)
unless cmd[1]
m = "Error: Cannot add tags '#{new_tags_str}' to server '#{s.chef_node_name}'"
logger.error(m)
out << m + "\n"
status.push 3
next
end
logger.info("Set tags for '#{s.chef_node_name}': #{new_tags_str}")
end
unless keys.key? s.key
k = BaseRoutes.mongo.key s.key
keys[s.key] = k.path
end
status.push(deploy_server out, s, keys[s.key])
unless tags.empty?
out << "Restore tags\n"
cmd = KnifeCommands.tags_delete(s.chef_node_name, new_tags_str)
logger.info("Deleted tags for #{s.chef_node_name}: #{new_tags_str}")
cmd = KnifeCommands.tags_create(s.chef_node_name, old_tags_str)
logger.info("Set tags for #{s.chef_node_name}: #{old_tags_str}")
res = deploy_server_proc.call(out, s, BaseRoutes.mongo, tags)
status.push(res)
end
out << create_status(status)
rescue IOError => e
logger.error e.message
break
end
end # stream
else
dir = DevopsService.config[:deploy_report_dir_v2]
files = []
uri = URI.parse(request.url)
servers.each do |s|
project = begin
BaseRoutes.mongo.check_project_auth s.project, s.deploy_env, request.env['REMOTE_USER']
rescue InvalidPrivileges, RecordNotFound => e
next
end
jid = DeployWorker.perform_async(dir, s.to_hash, tags, DevopsService.config)
logger.info "Job '#{jid}' has been started"
uri.path = "#{DevopsService.config[:url_prefix]}/v2.0/report/deploy/" + jid
files.push uri.to_s
end
json files
end
end

View File

@ -5,6 +5,7 @@ require "db/exceptions/invalid_record"
require "commands/deploy"
require "commands/status"
require "commands/server"
require "workers/project_test_worker"
module Version2_0
class ProjectRoutes < BaseRoutes
@ -376,19 +377,14 @@ module Version2_0
check_array(obj["servers"], "Parameter 'servers' should be a not empty array of strings", String, true)
project = BaseRoutes.mongo.project(params[:id])
servers = BaseRoutes.mongo.servers(params[:id], obj["deploy_env"], obj["servers"], true)
=begin
servers.delete_if{|s| s.reserved_by.nil?}
unless obj["servers"].nil?
logger.debug "Servers in params: #{obj["servers"].inspect}\nServers: #{servers.map{|s| s.chef_node_name}.inspect}"
servers.select!{|ps| obj["servers"].include?(ps.chef_node_name)}
end
=end
keys = {}
if obj.key?("trace")
stream() do |out|
begin
out << (servers.empty? ? "No reserved servers to deploy\n" : "Deploy servers: '#{servers.map{|s| s.chef_node_name}.join("', '")}'\n")
status = []
servers.each do |s|
logger.debug "Deploy server: #{s.inspect}"
begin
BaseRoutes.mongo.check_project_auth s.project, s.deploy_env, request.env['REMOTE_USER']
@ -401,13 +397,31 @@ module Version2_0
k = BaseRoutes.mongo.key s.key
keys[s.key] = k.path
end
status.push(deploy_server out, s, keys[s.key])
status.push(deploy_server(out, s, keys[s.key]))
end
out << create_status(status)
rescue IOError => e
logger.error e.message
end
end
else
dir = DevopsService.config[:deploy_report_dir_v2]
files = []
uri = URI.parse(request.url)
servers.each do |s|
project = begin
BaseRoutes.mongo.check_project_auth s.project, s.deploy_env, request.env['REMOTE_USER']
rescue InvalidPrivileges, RecordNotFound => e
next
end
jid = DeployWorker.perform_async(dir, s.to_hash, [], DevopsService.config)
logger.info "Job '#{jid}' has been started"
uri.path = "#{DevopsService.config[:url_prefix]}/v2.0/report/deploy/" + jid
files.push uri.to_s
end
json files
end
end
# Test project environment
@ -481,62 +495,20 @@ module Version2_0
check_privileges("project", "r")
project = BaseRoutes.mongo.project(params[:id])
env = project.deploy_env params[:env]
user = request.env['REMOTE_USER']
provider = ::Provider::ProviderFactory.get(env.provider)
header = "Test project '#{project.id}' and environment '#{env.identifier}'"
logger.info header
servers = extract_servers(provider, project, env, {}, user, BaseRoutes.mongo)
result = {:servers => []}
project.deploy_envs = [ env ]
result[:project] = project.to_hash
servers.each do |s|
sr = {}
t1 = Time.now
out = ""
if provider.create_server(s, out)
t2 = Time.now
sr[:id] = s.id
sr[:create] = {:status => true}
sr[:create][:time] = time_diff_s(t1, t2)
logger.info "Server with parameters: #{s.to_hash.inspect} is running"
key = BaseRoutes.mongo.key(s.key)
b_out = ""
r = bootstrap(s, b_out, key.path, logger)
t1 = Time.now
sr[:chef_node_name] = s.chef_node_name
if r == 0
sr[:bootstrap] = {:status => true}
sr[:bootstrap][:time] = time_diff_s(t2, t1)
logger.info "Server with id '#{s.id}' is bootstraped"
if check_server(s)
BaseRoutes.mongo.server_insert s
end
else
sr[:bootstrap] = {:status => false}
sr[:bootstrap][:log] = b_out
sr[:bootstrap][:return_code] = r
end
logger.info "Test project '#{project.id}' and environment '#{env.identifier}'"
t1 = Time.now
r = delete_from_chef_server(s.chef_node_name)
begin
r[:server] = provider.delete_server s.id
rescue Fog::Compute::OpenStack::NotFound, Fog::Compute::AWS::Error
r[:server] = "Server with id '#{s.id}' not found in '#{provider.name}' servers"
logger.warn r[:server]
end
BaseRoutes.mongo.server_delete s.id
t2 = Time.now
sr[:delete] = {:status => true}
sr[:delete][:time] = time_diff_s(t1, t2)
sr[:delete][:log] = r
else
sr[:create] = {:status => false}
sr[:create][:log] = out
end
result[:servers].push sr
end
create_response(header, result)
dir = File.join(DevopsService.config[:project_report_dir_v2], project.id, env.identifier)
uri = URI.parse(request.url)
p = {
:project => project.id,
:env => env.identifier,
:user => request.env['REMOTE_USER']
}
jid = ProjectTestWorker.perform_async(dir, p, DevopsService.config)
logger.info "Job '#{jid}' has been created"
uri.path = "#{DevopsService.config[:url_prefix]}/v2.0/report/project/#{project.id}/#{env.identifier}/" + jid
files = [uri.to_s]
json files
end
private

View File

@ -1,3 +1,4 @@
require "uri"
require "json"
require "chef"
require "commands/knife_commands"
@ -7,6 +8,9 @@ require "providers/provider_factory"
require "db/mongo/models/deploy_env"
require "commands/status"
require "commands/server"
require "commands/bootstrap_templates"
require "workers/create_server_worker"
require "workers/bootstrap_worker"
module Version2_0
@ -32,6 +36,7 @@ module Version2_0
include StatusCommands
include ServerCommands
include BootstrapTemplatesCommands
def initialize wrapper
super wrapper
@ -203,7 +208,8 @@ module Version2_0
# "without_bootstrap": null, -> do not install chef on instance if true
# "force": null, -> do not delete server on error
# "groups": [], -> specify special security groups, overrides value from project env
# "key": "ssh key" -> specify ssh key for server, overrides value from project env
# "key": "ssh key", -> specify ssh key for server, overrides value from project env
# "trace": true -> return output in stream
# }
#
# * *Returns* : text stream
@ -234,40 +240,32 @@ module Version2_0
end
servers = extract_servers(provider, p, env, body, user, BaseRoutes.mongo)
if body.key?("trace")
stream() do |out|
begin
status = []
servers.each do |s|
begin
unless provider.create_server(s, out)
status.push 3
next
end
logger.info "Server with parameters: #{s.to_hash.inspect} is running"
unless without_bootstrap
key = new_key || BaseRoutes.mongo.key(s.key)
bootstrap(s, out, key.path, logger)
logger.info "Server with id '#{s.id}' is bootstraped"
if force or check_server(s)
BaseRoutes.mongo.server_insert s
scheduler.in(env.expires, ExpireHandler.new(s, logger)) unless env.expires.nil?
out << "Server #{s.chef_node_name} is created"
status.push 0
else
out << roll_back(s, provider)
status.push 5
end
out << "\n"
else
BaseRoutes.mongo.server_insert s
status.push 0
res = create_server_proc.call(out, s, provider, BaseRoutes.mongo)
status.push res
end
out << create_status(status)
rescue IOError => e
logger.error e.message
logger.warn roll_back(s, provider)
break
end
end
else
dir = DevopsService.config[:server_report_dir_v2]
files = []
uri = URI.parse(request.url)
servers.each do |s|
h = s.to_hash
h["options"] = s.options
jid = CreateServerWorker.perform_async(dir, env.provider, h, DevopsService.config)
logger.info "Job '#{jid}' has been started"
uri.path = "#{DevopsService.config[:url_prefix]}/v2.0/report/server/" + jid
files.push uri.to_s
end
json files
end
end
@ -408,20 +406,27 @@ module Version2_0
provider = ::Provider::ProviderFactory.get(s.provider)
check_chef_node_name(name, provider) unless name.nil?
s.options = {
:run_list => rl || d.run_list,
}
s.options[:bootstrap_template] = t unless t.nil?
unless t.nil?
templates = get_templates
halt_response("Invalid bootstrap template '#{t}', available values: #{templates.join(", ")}", 400) unless templates.include?(t)
s.options[:bootstrap_template] = t
end
s.chef_node_name = name || provider.create_default_chef_node_name(s)
logger.debug "Chef node name: '#{s.chef_node_name}'"
status = []
if body.key?("trace")
stream() do |out|
begin
s.chef_node_name = name || provider.create_default_chef_node_name(s)
cert = BaseRoutes.mongo.key s.key
logger.debug "Bootstrap certificate path: #{cert.path}"
bootstrap s, out, cert.path, logger
str = nil
r = if check_server(s)
BaseRoutes.mongo.server_update s
BaseRoutes.mongo.server_set_chef_node_name s
str = "Server with id '#{s.id}' is bootstraped"
logger.info str
0
@ -438,6 +443,21 @@ module Version2_0
logger.error e.message
end
end
else
dir = DevopsService.config[:server_report_dir_v2]
files = []
uri = URI.parse(request.url)
h = s.to_hash
h["options"] = s.options
h["_id"] = s.id
jid = BootstrapWorker.perform_async(dir, d.provider, h, DevopsService.config)
logger.info "Job '#{jid}' has been started"
uri.path = "#{DevopsService.config[:url_prefix]}/v2.0/report/server/" + jid
uri.query = nil
uri.fragment = nil
files.push uri.to_s
json files
end
end
# Add external server to devops

View File

@ -0,0 +1,21 @@
require "json"
require "routes/v2.0/base_routes"
require "sidekiq"
module Version2_0
# Routes for looking up background-job status.
# GET /status/:id returns the raw state string ("init", "running",
# "completed", "failed") that workers write to the Redis hash "devops".
class StatusRoutes < BaseRoutes
def initialize wrapper
super wrapper
# NOTE(review): plain stdout print — looks like a startup/debug trace.
puts "Status routes initialized"
end
get "/status/:id" do
# Workers record their state under the job id (see Worker#set_status).
r = Sidekiq.redis do |connection|
connection.hget("devops", params[:id])
end
return [404, "Job with id '#{params[:id]}' not found"] if r.nil?
r
end
end
end

View File

@ -0,0 +1,44 @@
#root = File.join(File.dirname(__FILE__), "..")
#$LOAD_PATH.push root unless $LOAD_PATH.include? root
require File.join(File.dirname(__FILE__), "worker")
require "providers/provider_factory"
require "commands/server"
require "db/mongo/models/server"
# Sidekiq worker that bootstraps an already-created server with chef and
# writes a streaming report file named after the job id into `dir`.
class BootstrapWorker < Worker
include ServerCommands
# dir        - directory for the report file (one file per job id)
# e_provider - provider identifier used to look up the provider instance
# server     - server attributes hash (plus "options") to rebuild the model
# conf       - service configuration with string keys (symbolized below)
def perform(dir, e_provider, server, conf)
set_status jid, "init"
config = convert_config(conf)
File.open(File.join(dir, jid), "w") do |out|
begin
set_status jid, "running"
mongo = mongo_connector(config)
::Provider::ProviderFactory.init(config)
provider = ::Provider::ProviderFactory.get(e_provider)
logger.debug "Provider: #{provider.inspect}"
s = Server.new(server)
# Options arrive with string keys over the wire; symbolize them.
s.options = convert_config(server["options"])
key = mongo.key(s.key)
out << "\nBootstrap with run list: #{s.options[:run_list].inspect}"
status = bootstrap(s, out, key.path, logger)
if status == 0
out << "Chef node name: #{s.chef_node_name}\n"
mongo.server_set_chef_node_name s
out << "Chef node name has been updated\n"
end
set_status jid, (status == 0 ? "completed" : "failed")
# Catches everything (including non-StandardError) so the failure and
# backtrace land in the report file. NOTE(review): consider StandardError.
rescue Exception => e
out << "\n"
out << e.message
out << "\n"
out << e.backtrace.join("\n")
set_status jid, "failed"
end
end
end
end

View File

@ -0,0 +1,36 @@
#root = File.join(File.dirname(__FILE__), "..")
#$LOAD_PATH.push root unless $LOAD_PATH.include? root
require File.join(File.dirname(__FILE__), "worker")
require "providers/provider_factory"
require "commands/server"
require "db/mongo/models/server"
# Sidekiq worker that creates and bootstraps a new server via
# ServerCommands#create_server_proc, writing a streaming report file named
# after the job id into `dir`.
class CreateServerWorker < Worker
include ServerCommands
# dir        - directory for the report file (one file per job id)
# e_provider - provider identifier used to look up the provider instance
# server     - server attributes hash (plus "options") to rebuild the model
# conf       - service configuration with string keys (symbolized below)
def perform(dir, e_provider, server, conf)
set_status jid, "init"
config = convert_config(conf)
File.open(File.join(dir, jid), "w") do |out|
begin
set_status jid, "running"
mongo = mongo_connector(config)
::Provider::ProviderFactory.init(config)
provider = ::Provider::ProviderFactory.get(e_provider)
logger.debug "Provider: #{provider.inspect}"
s = Server.new(server)
# Options arrive with string keys over the wire; symbolize them.
s.options = convert_config(server["options"])
status = create_server_proc.call(out, s, provider, mongo)
set_status jid, (status == 0 ? "completed" : "failed")
# Catches everything (including non-StandardError) so the failure and
# backtrace land in the report file. NOTE(review): consider StandardError.
rescue Exception => e
out << "\n"
out << e.message
out << "\n"
out << e.backtrace.join("\n")
set_status jid, "failed"
end
end
end
end

View File

@ -0,0 +1,30 @@
#root = File.join(File.dirname(__FILE__), "..")
#$LOAD_PATH.push root unless $LOAD_PATH.include? root
require File.join(File.dirname(__FILE__), "worker")
require "commands/deploy"
require "db/mongo/models/server"
# Sidekiq worker that runs chef-client on an existing server via
# DeployCommands#deploy_server_proc, writing a streaming report file named
# after the job id into `dir`.
class DeployWorker < Worker
include DeployCommands
# dir    - directory for the report file (one file per job id)
# server - server attributes hash used to rebuild the Server model
# tags   - chef tags to apply temporarily for this run (may be empty)
# conf   - service configuration with string keys (symbolized below)
def perform(dir, server, tags, conf)
set_status jid, "init"
config = convert_config(conf)
File.open(File.join(dir, jid), "w") do |out|
begin
set_status jid, "running"
mongo = mongo_connector(config)
s = deploy_server_proc.call(out, Server.new(server), mongo, tags)
set_status jid, (s == 0 ? "completed" : "failed")
# Catches everything (including non-StandardError) so the failure and
# backtrace land in the report file. NOTE(review): consider StandardError.
rescue Exception => e
out << "\n"
out << e.message
out << "\n"
out << e.backtrace.join("\n")
set_status jid, "failed"
end
end
end
end

View File

@ -0,0 +1,114 @@
require File.join(File.dirname(__FILE__), "worker")
require "providers/provider_factory"
require "commands/server"
require "db/mongo/models/server"
require "json"
require "fileutils"
require "commands/status"
# Sidekiq worker that smoke-tests a project environment: for each server the
# environment defines it creates the instance, bootstraps it, verifies it,
# then deletes it again — timing each phase and appending a JSON summary to
# the report file written into `dir`.
class ProjectTestWorker < Worker
include ServerCommands
include StatusCommands
# dir    - directory for the report file (one file per job id)
# params - {"project" => id, "env" => identifier, "user" => remote user}
# conf   - service configuration with string keys (symbolized below)
def perform(dir, params, conf)
FileUtils.mkdir_p(dir) unless File.exists?(dir)
set_status jid, "init"
config = convert_config(conf)
File.open(File.join(dir, jid), "w") do |out|
begin
set_status jid, "running"
logger.info "Test project '#{params["project"]}' and env '#{params["env"]}' (user - #{params["user"]})"
mongo = mongo_connector(config)
::Provider::ProviderFactory.init(config)
project = mongo.project(params["project"])
env = project.deploy_env(params["env"])
user = params["user"]
provider = ::Provider::ProviderFactory.get(env.provider)
logger.debug "Provider: #{provider.inspect}"
servers = extract_servers(provider, project, env, {}, user, mongo)
# Per-server phase results accumulate here and are dumped as JSON at the end.
result = {:servers => []}
project.deploy_envs = [ env ]
result[:project] = project.to_hash
# 0 = all phases OK, 1 = create failed, 2 = bootstrap failed, 3 = delete failed.
status = 0
servers.each do |s|
sr = {}
t1 = Time.now
out << "\n=== Create server ===\n"
out.flush
if provider.create_server(s, out)
out << "\n=== Create server - OK ===\n"
out.flush
t2 = Time.now
sr[:id] = s.id
sr[:create] = {:status => true}
sr[:create][:time] = time_diff_s(t1, t2)
s.chef_node_name = provider.create_default_chef_node_name(s)
logger.info "Server with parameters: #{s.to_hash.inspect} is running"
key = mongo.key(s.key)
out << "\n=== Bootstrap ===\n"
out.flush
r = bootstrap(s, out, key.path, logger)
t1 = Time.now
sr[:chef_node_name] = s.chef_node_name
if r == 0
out << "\n=== Bootstrap - OK ===\n"
out.flush
sr[:bootstrap] = {:status => true}
sr[:bootstrap][:time] = time_diff_s(t2, t1)
logger.info "Server with id '#{s.id}' is bootstraped"
out << "\n=== Check server ===\n"
out.flush
if check_server(s)
mongo.server_insert s
out << "\n=== OK, server has been inserted ===\n"
out.flush
end
else
status = 2
out << "\n=== Bootstrap - FAIL ===\n"
out.flush
sr[:bootstrap] = {:status => false}
sr[:bootstrap][:return_code] = r
end
# Tear down the test instance even when bootstrap failed.
t1 = Time.now
out << "\n=== Delete server ===\n"
out.flush
r = delete_from_chef_server(s.chef_node_name)
begin
r[:server] = provider.delete_server s
out << "\n=== Delete server - OK ===\n"
out.flush
rescue Fog::Compute::OpenStack::NotFound, Fog::Compute::AWS::Error
status = 3
out << "\n=== Delete server - FAIL ===\n"
out.flush
r[:server] = "Server with id '#{s.id}' not found in '#{provider.name}' servers"
logger.warn r[:server]
end
mongo.server_delete s.id
t2 = Time.now
sr[:delete] = {:status => true}
sr[:delete][:time] = time_diff_s(t1, t2)
else
status = 1
out << "\n=== Create server - FAIL ===\n"
out.flush
sr[:create] = {:status => false}
end
result[:servers].push sr
end
out << "\n\n#{result.to_json}"
set_status jid, (status == 0 ? "completed" : "failed")
# Catches everything (including non-StandardError) so the failure and
# backtrace land in the report file. NOTE(review): consider StandardError.
rescue Exception => e
out << "\n"
out << e.message
out << "\n"
out << e.backtrace.join("\n")
set_status jid, "failed"
end
end
end
end

View File

@ -0,0 +1,9 @@
# Loads every Sidekiq worker class so the sidekiq process can resolve jobs.
root = File.dirname(__FILE__)
require File.join(root, "create_server_worker")
require File.join(root, "deploy_worker")
require File.join(root, "bootstrap_worker")
require File.join(root, "project_test_worker")
# NOTE(review): this local is never used here and cannot be seen by the
# required file below — looks vestigial; confirm before removing.
config = {}
require File.join(root, "../proxy")

View File

@ -0,0 +1,29 @@
root = File.join(File.dirname(__FILE__), "..")
$LOAD_PATH.push root unless $LOAD_PATH.include? root
require "sidekiq"
require "sidekiq/api"
require "db/mongo/mongo_connector"
# Common base for the Sidekiq background workers: configuration
# normalization, Mongo connection setup and job-status bookkeeping in Redis.
class Worker
  include Sidekiq::Worker

  # Returns a copy of +conf+ with every String key converted to a Symbol;
  # non-String keys are kept unchanged. Logs the result at debug level.
  def convert_config conf
    symbolized = conf.each_with_object({}) do |(key, value), acc|
      acc[key.is_a?(String) ? key.to_sym : key] = value
    end
    logger.debug "Config: #{symbolized.inspect}"
    symbolized
  end

  # Builds a MongoConnector from the symbolized service configuration.
  def mongo_connector config
    connector = MongoConnector.new(config[:mongo_db], config[:mongo_host], config[:mongo_port], config[:mongo_user], config[:mongo_password])
    logger.debug "Mongo connector: #{connector.inspect}"
    connector
  end

  # Records the job's state (e.g. "init", "running", "completed", "failed")
  # under its id in the Redis hash "devops".
  def set_status id, status
    Sidekiq.redis {|con| con.hset "devops", id, status}
  end
end