Class: Haas::ChefProvider
- Inherits:
-
Object
- Object
- Haas::ChefProvider
- Defined in:
- lib/haas/chef.rb
Constant Summary collapse
- COOKBOOK_PATH =
File.join(Haas::Config::WORKING_DIR, 'cookbooks')
Class Method Summary collapse
- .bootstrap_node(node) ⇒ Object
- .download_cookbook(cookbook_name, url) ⇒ Object
- .install_chef_server ⇒ Object
- .setup_cluster ⇒ Object
- .setup_environment ⇒ Object
- .upload_cookbook ⇒ Object
- .write_knife_config_file ⇒ Object
Class Method Details
.bootstrap_node(node) ⇒ Object
82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 |
# File 'lib/haas/chef.rb', line 82

# Bootstraps one cluster node with knife bootstrap over SSH.
# Every node receives the Ambari agent recipe; the node that hosts the
# Chef server additionally receives the Ambari server recipe.
#
# @param node [Object] a cluster node responding to #public_dns_name
# @return [void]
def self.bootstrap_node node
  require 'chef'
  require 'chef/knife'
  require 'chef/knife/bootstrap'
  require 'chef/knife/core/bootstrap_context'
  require 'chef/knife/ssh'
  require 'net/ssh'
  require 'net/ssh/multi'

  puts "Bootstrapping node #{node.public_dns_name}"
  user = Haas.cluster.ssh_user
  run_list = ["recipe[ambari::agent]"]
  # NOTE(review): the condition was truncated in the extracted source
  # ("... if node."); restored as the node's chef-server flag, consistent
  # with Haas.cluster.get_chef_server used elsewhere — confirm against
  # the node model.
  run_list << "recipe[ambari::server]" if node.chef_server

  # Knife reads client/validator keys and the server URL from the
  # generated knife config (see write_knife_config_file).
  Chef::Config.from_file(Haas.cluster.knife_config_path)
  kb = Chef::Knife::Bootstrap.new
  kb.config[:ssh_user] = user
  kb.config[:run_list] = run_list
  kb.config[:use_sudo] = true
  kb.config[:identity_file] = Haas.cluster.identity_file_path
  kb.config[:distro] = 'chef-full'
  kb.name_args = [node.public_dns_name]
  kb.run
end
.download_cookbook(cookbook_name, url) ⇒ Object
108 109 110 111 112 113 114 115 116 117 118 119 120 121 |
# File 'lib/haas/chef.rb', line 108

# Downloads a cookbook tarball and unpacks it into the local cookbooks
# directory under Haas::Config::WORKING_DIR.
#
# @param cookbook_name [String] basename for the downloaded archive
# @param url [String] HTTP(S) URL of a gzipped tar cookbook archive
# @return [void]
def self.download_cookbook cookbook_name, url
  require 'open-uri'
  require 'zlib'
  require 'archive/tar/minitar'

  cookbooks_dir = File.join(Haas::Config::WORKING_DIR, 'cookbooks')
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  Dir.mkdir(cookbooks_dir) unless File.exist?(cookbooks_dir)
  archive_path = File.join(cookbooks_dir, "#{cookbook_name}.tar.gz")

  # open-uri's Kernel#open fetches the remote archive; the block form
  # guarantees the local file handle is closed.
  open(archive_path, 'wb') do |file|
    file << open(url).read
  end

  # Block form + ensure so both the gzip stream and the underlying file
  # are closed even if unpacking raises (the original leaked both).
  File.open(archive_path, 'rb') do |archive|
    tgz = Zlib::GzipReader.new(archive)
    begin
      Archive::Tar::Minitar.unpack(tgz, cookbooks_dir)
    ensure
      tgz.close
    end
  end
end
.install_chef_server ⇒ Object
22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 |
# File 'lib/haas/chef.rb', line 22 def self.install_chef_server require 'net/ssh' chef_server = Haas.cluster.get_chef_server user = Haas.cluster.ssh_user chef_server_file = "chef-server-core-12.0.0_rc.5-1.el5.x86_64.rpm" chef_server_url = "https://packagecloud.io/chef/stable/download?distro=6&filename=#{chef_server_file}" chef_server_local_path = "/tmp/#{chef_server_file}" Net::SSH.start( chef_server.public_dns_name, user, :host_key => "ssh-rsa", :encryption => "blowfish-cbc", :keys => [ Haas.cluster.identity_file_path ], :compression => "zlib" ) do |ssh| puts "Entering chef server installation on the node #{chef_server.public_dns_name}. This may take a while." puts "Disable iptables" ssh.exec!("service iptables stop") puts "Downloading and installing the chef server." ssh.exec!(%{ until curl -L '#{chef_server_url}' -o #{chef_server_local_path} && rpm -ivh #{chef_server_local_path}; do echo "installing chef server"; done }) ssh.exec!("rpm -ivh #{chef_server_local_path}") puts "Configuring chef server." ssh.exec!("mkdir -p /etc/opscode/") ssh.exec!(%{echo "nginx['non_ssl_port'] = false" >> /etc/opscode/chef-server.rb}) ssh.exec!("chef-server-ctl reconfigure") client_key = "" while !client_key.include?("BEGIN RSA PRIVATE KEY") do client_key = ssh.exec!("chef-server-ctl user-create haas-api HAAS Api [email protected] abc123") end File.write(Haas.cluster.chef_client_pem_path, client_key) org_validator_key = ssh.exec!("chef-server-ctl org-create haas Hadoop as a Service --association_user haas-api") File.write(Haas.cluster.chef_validator_pem_path, org_validator_key) end end |
.setup_cluster ⇒ Object
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 |
# File 'lib/haas/chef.rb', line 6

# Orchestrates a full cluster build: installs the Chef server, writes
# the knife config, fetches and uploads the required cookbooks, creates
# the Chef environment, then bootstraps every node in parallel.
def self.setup_cluster
  install_chef_server
  write_knife_config_file

  cookbook_sources = [
    { 'name' => 'ambari', 'url' => "https://supermarket.getchef.com/cookbooks/ambari/download" }
  ]
  cookbook_sources.each do |source|
    download_cookbook source['name'], source['url']
  end

  upload_cookbook
  setup_environment

  # Bootstrap all nodes concurrently and wait for every thread to finish.
  bootstrap_threads = Haas.cluster.nodes.map do |node|
    Thread.new { bootstrap_node(node) }
  end
  bootstrap_threads.each(&:join)
end
.setup_environment ⇒ Object
139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 |
# File 'lib/haas/chef.rb', line 139

# Creates and saves a Chef environment named after the cluster, pinning
# the Ambari server FQDN as an override attribute so every node's
# ambari::agent recipe can find its server.
#
# @return [void]
def self.setup_environment
  require 'chef/environment'
  require 'chef/rest'

  # NOTE(review): the local variable and accessor were lost in the
  # extracted source ("= Haas.cluster." / ".private_dns_name");
  # restored as the chef-server node, which per setup_cluster also runs
  # recipe[ambari::server] — confirm against the original file.
  ambari_server = Haas.cluster.get_chef_server
  override_attributes = {
    :ambari => {
      :server_fqdn => ambari_server.private_dns_name
    }
  }

  Chef::Config.from_file(Haas.cluster.knife_config_path)
  environment = Chef::Environment.new
  environment.name(Haas.cluster.name)
  environment.description("haas hadoop cluster")
  environment.override_attributes(override_attributes)
  environment.save
end
.upload_cookbook ⇒ Object
123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 |
# File 'lib/haas/chef.rb', line 123

# Loads every cookbook under COOKBOOK_PATH and uploads them all to the
# Chef server configured in the knife config.
#
# @return [void]
def self.upload_cookbook
  require 'chef'
  require 'chef/cookbook_uploader'

  puts "Uploading cookbooks to the chef server."
  Chef::Config.from_file(Haas.cluster.knife_config_path)
  cookbook_repo = Chef::CookbookLoader.new(COOKBOOK_PATH)
  cookbook_repo.load_cookbooks
  # Idiom: map instead of manual each-accumulate; the loader yields
  # (name, cookbook) pairs and only the cookbook objects are needed.
  cookbooks = cookbook_repo.map { |_cookbook_name, cookbook| cookbook }
  Chef::CookbookUploader.new(cookbooks, :force => false, :concurrency => 10).upload_cookbooks
end
.write_knife_config_file ⇒ Object
63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 |
# File 'lib/haas/chef.rb', line 63

# Renders a knife configuration pointing at this cluster's Chef server
# (client/validator keys, server URL, cookbook path, environment) and
# writes it to the cluster's knife config path.
def self.write_knife_config_file
  knife_config = %{
    log_level                :info
    log_location             STDOUT
    node_name                "haas-api"
    client_key               "#{Haas.cluster.chef_client_pem_path}"
    validation_client_name   "haas-validator"
    validation_key           "#{Haas.cluster.chef_validator_pem_path}"
    chef_server_url          "https://#{Haas.cluster.get_chef_server.public_dns_name}/organizations/haas"
    cache_type               'BasicFile'
    cache_options( :path => "#{ENV['HOME']}/.chef/checksums" )
    cookbook_path ["#{COOKBOOK_PATH}"]
    environment "#{Haas.cluster.name}"
  }
  File.write(Haas.cluster.knife_config_path, knife_config)
end