-rwxr-xr-x | cloud.rb                               | 91
-rwxr-xr-x | cluster.sh                             | 82
-rw-r--r-- | playbooks/gce/os3-master/terminate.yml | 39
-rw-r--r-- | playbooks/gce/os3-minion/terminate.yml | 39
4 files changed, 232 insertions, 19 deletions
diff --git a/cloud.rb b/cloud.rb
@@ -6,13 +6,18 @@ require 'yaml'
 require 'securerandom'
 require 'fileutils'
 require 'parseconfig'
+require 'open3'
+
+# Don't buffer output to the client
+STDOUT.sync = true
+STDERR.sync = true
 
 SCRIPT_DIR = File.expand_path(File.dirname(__FILE__))
 
 module OpenShift
   module Ops
     # WARNING: we do not currently support environments with hyphens in the name
-    SUPPORTED_ENVS = ['prod','stg','int','tint','kint','test']
+    SUPPORTED_ENVS = %w(prod stg int tint kint test jint)
 
     class GceHelper
       def self.list_hosts()
@@ -101,14 +106,15 @@ module OpenShift
         @pipelining = true
       end
 
+      def all_eof(files)
+        files.find { |f| !f.eof }.nil?
+      end
+
       def run_playbook(playbook)
         @inventory = 'inventory/hosts' if @inventory.nil?
 
         # This is used instead of passing in the json on the cli to avoid quoting problems
-        tmpfile = Tempfile.new('extra_vars')
-        tmpfile.write(@extra_vars.to_json)
-        tmpfile.sync()
-        tmpfile.close()
+        tmpfile = Tempfile.open('extra_vars') { |f| f.write(@extra_vars.to_json); f}
 
         cmds = []
@@ -121,18 +127,23 @@ module OpenShift
         # We need pipelining off so that we can do sudo to enable the root account
         cmds << %Q[export ANSIBLE_SSH_PIPELINING='#{@pipelining.to_s}']
 
+        cmds << %Q[time -p ansible-playbook -i #{@inventory} #{@verbosity} #{playbook} --extra-vars '@#{tmpfile.path}']
-        ssh_key_arg = %q[--private-key=~/.ssh/mmcgrath_libra] if File.file?(ENV['HOME']+'/.ssh/mmcgrath_libra.pem')
 
+        cmd = cmds.join(' ; ')
-        cmds << %Q[time -p ansible-playbook -i #{@inventory} #{@verbosity} #{playbook} #{ssh_key_arg} --extra-vars '@#{tmpfile.path}']
 
+        pid = spawn(cmd, :out => $stdout, :err => $stderr, :close_others => true)
+        _, state = Process.wait2(pid)
-        cmd = cmds.join(' ; ')
 
+        if 0 != state.exitstatus
+          raise %Q[Warning failed with exit code: #{state.exitstatus}
+
+#{cmd}
-        unless system(cmd)
-          puts %Q[Following command failed with exit code: #{$?.exitstatus}\n#{cmd}]
-          puts %Q[extra_vars: #{@extra_vars.to_json}]
+extra_vars: #{@extra_vars.to_json}
+]
         end
-        tmpfile.unlink
+      ensure
+        tmpfile.unlink if tmpfile
       end
 
       def merge_extra_vars_file(file)
@@ -165,9 +176,17 @@ module OpenShift
         ah.inventory = 'inventory/gce/gce.py'
         return ah
       end
+
+      def ignore_bug_6407
+        puts
+        puts %q[ .---- Spurious warning "It is unnecessary to use '{{' in loops" (ansible bug 6407) ----.]
+        puts %q[ V                                                                                      V]
+      end
+
     end
 
     class GceCommand < Thor
+
       option :type, :required => true, :enum => LaunchHelper.get_gce_host_types,
              :desc => 'The host type of the new instances.'
       option :env, :required => true, :aliases => '-e', :enum => OpenShift::Ops::SUPPORTED_ENVS,
@@ -199,14 +218,12 @@ module OpenShift
 
         puts
         puts 'Creating instance(s) in GCE...'
-        puts
-        puts %q[ .---- Spurious warning "It is unnecessary to use '{{' in loops" (ansible bug 6407) ----.]
-        puts %q[ V                                                                                      V]
-
+        ah.ignore_bug_6407
         ah.run_playbook("playbooks/gce/#{options[:type]}/launch.yml")
       end
 
+
       option :name, :required => false, :type => :string,
              :desc => 'The name of the instance to configure.'
       option :env, :required => false, :aliases => '-e', :enum => OpenShift::Ops::SUPPORTED_ENVS,
@@ -238,13 +255,49 @@ module OpenShift
 
         puts
         puts "Configuring #{options[:type]} instance(s) in GCE..."
-        puts
-        puts "   .---- Disregard this (ansible bug 6407) ----."
-        puts "   V                                          V"
+        ah.ignore_bug_6407
 
         ah.run_playbook("playbooks/gce/#{host_type}/config.yml")
       end
 
+      option :name, :required => false, :type => :string,
+             :desc => 'The name of the instance to terminate.'
+      option :env, :required => false, :aliases => '-e', :enum => OpenShift::Ops::SUPPORTED_ENVS,
+             :desc => 'The environment of the new instances.'
+      option :type, :required => false, :enum => LaunchHelper.get_gce_host_types,
+             :desc => 'The type of the instances to configure.'
+      option :confirm, :required => false, :type => :boolean,
+             :desc => 'Terminate without interactive confirmation'
+      desc "terminate", 'Terminate instances'
+      def terminate()
+        ah = AnsibleHelper.for_gce()
+
+        abort 'Error: you can\'t specify both --name and --type' unless options[:type].nil? || options[:name].nil?
+
+        abort 'Error: you can\'t specify both --name and --env' unless options[:env].nil? || options[:name].nil?
+
+        host_type = nil
+        if options[:name]
+          details = GceHelper.get_host_details(options[:name])
+          ah.extra_vars['oo_host_group_exp'] = options[:name]
+          ah.extra_vars['oo_env'] = details['env']
+          host_type = details['host-type']
+        elsif options[:type] && options[:env]
+          oo_env_host_type_tag = GceHelper.generate_env_host_type_tag_name(options[:env], options[:type])
+          ah.extra_vars['oo_host_group_exp'] = "groups['#{oo_env_host_type_tag}']"
+          ah.extra_vars['oo_env'] = options[:env]
+          host_type = options[:type]
+        else
+          abort 'Error: you need to specify either --name or (--type and --env)'
+        end
+
+        puts
+        puts "Terminating #{options[:type]} instance(s) in GCE..."
+        ah.ignore_bug_6407
+
+        ah.run_playbook("playbooks/gce/#{host_type}/terminate.yml")
+      end
+
       desc "list", "Lists instances."
       def list()
         hosts = GceHelper.list_hosts()
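The new terminate subcommand mirrors the existing launch and config commands: it resolves either a single instance (--name) or an environment/type pair (--env plus --type) into the oo_host_group_exp and oo_env extra vars, then runs the matching terminate.yml playbook. A minimal invocation sketch follows; the environment tag is one of the supported values above, and the instance name is a placeholder rather than a value taken from this change:

    # Terminate every os3-minion instance tagged with the "int" environment
    ./cloud.rb gce terminate --type=os3-minion -e int

    # Terminate a single instance by name (placeholder name)
    ./cloud.rb gce terminate --name=<instance-name>
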
diff --git a/cluster.sh b/cluster.sh
new file mode 100755
index 000000000..50fbe66eb
--- /dev/null
+++ b/cluster.sh
@@ -0,0 +1,82 @@
+#!/bin/bash -eu
+
+MINIONS=3
+MASTERS=1
+PROVIDER=gce
+
+# @formatter:off
+function usage {
+    cat 1>&2 <<-EOT
+        ${0} : [create|destroy|update|list]  {GCE environment tag}
+
+        Supported environment tags:
+        $(grep 'SUPPORTED_ENVS.*=' ./cloud.rb)
+EOT
+}
+# @formatter:on
+
+function create_cluser {
+    for (( i = 0; i < $MINIONS; i ++ )); do
+        ./cloud.rb "${PROVIDER}" launch -e "${ENV}" --type=os3-minion
+    done
+
+    for (( i = 0; i < $MASTERS; i ++ )); do
+        ./cloud.rb "${PROVIDER}" launch -e "${ENV}" --type=os3-master
+    done
+    update_cluster
+    echo -e "\nCreated ${MASTERS} masters and ${MINIONS} minions using ${PROVIDER} provider\n"
+}
+
+function update_cluster {
+    for (( i = 0; i < $MINIONS; i ++ )); do
+        ./cloud.rb "${PROVIDER}" config -e "${ENV}" --type=os3-minion
+    done
+
+    for (( i = 0; i < $MASTERS; i ++ )); do
+        ./cloud.rb "${PROVIDER}" config -e "${ENV}" --type=os3-master
+    done
+}
+
+function terminate_cluster {
+    #./cloud.rb "${PROVIDER}" terminate -e "${ENV}" --type=os3-master
+    ./cloud.rb "${PROVIDER}" terminate -e "${ENV}" --type=os3-minion
+}
+
+[ -f ./cloud.rb ] || (echo 1>&2 'Cannot find ./cloud.rb' && exit 1)
+
+while getopts ':p:m:n:' flag; do
+    case "${flag}" in
+        p) PROVIDER="${OPTARG}" ;;
+        m) MASTERS="${OPTARG}" ;;
+        n) MINIONS="${OPTARG}" ;;
+        *) echo -e 2>&1 "unsupported option $OPTARG\n"
+           usage
+           exit 1 ;;
+    esac
+done
+shift $((OPTIND-1))
+
+[ -z "${1:-}" ] && (usage; exit 1)
+
+case "${1}" in
+    'create')
+        [ -z "${2:-}" ] && (usage; exit 1)
+        ENV="${2}"
+        create_cluser ;;
+    'update')
+        [ -z "${2:-}" ] && (usage; exit 1)
+        ENV="${2}"
+        update_cluster ;;
+    'terminate')
+        [ -z "${2:-}" ] && (usage; exit 1)
+        ENV="${2}"
+        terminate_cluster ;;
+    'list') ./cloud.rb "${PROVIDER}" list ;;
+    'help') usage; exit 0 ;;
+    *)
+        echo -n 1>&2 "${1} is not a supported operation";
+        usage;
+        exit 1 ;;
+esac
+
+exit 0
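cluster.sh is a thin wrapper around cloud.rb that loops over the requested number of masters and minions. A usage sketch, assuming the defaults above (1 master, 3 minions, the gce provider) and the "int" environment tag:

    # Launch and configure a cluster in the "int" environment
    ./cluster.sh create int

    # Re-run the config playbooks against the same environment
    ./cluster.sh update int

    # Override the minion count; options must come before the operation
    ./cluster.sh -n 5 create int

    # Tear it back down (terminate_cluster currently only terminates minions)
    ./cluster.sh terminate int
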
diff --git a/playbooks/gce/os3-master/terminate.yml b/playbooks/gce/os3-master/terminate.yml
new file mode 100644
index 000000000..b6a1b38e4
--- /dev/null
+++ b/playbooks/gce/os3-master/terminate.yml
@@ -0,0 +1,39 @@
+- name: "populate oo_hosts_to_terminate host group if needed"
+  hosts: localhost
+  gather_facts: no
+  tasks:
+    - debug: var=oo_host_group_exp
+
+    - name: Evaluate oo_host_group_exp if it's set
+      add_host: "name={{ item }} groups=oo_hosts_to_terminate"
+      with_items: "{{ oo_host_group_exp | default('') }}"
+      when: oo_host_group_exp is defined
+
+    - debug: msg="{{ groups['oo_hosts_to_terminate'] }}"
+
+
+- name: Terminate instances
+  hosts: localhost
+  connection: local
+  tasks:
+    - name: Terminate master instances
+      gce:
+        service_account_email: "{{ gce_service_account_email }}"
+        pem_file: "{{ gce_pem_file }}"
+        project_id: "{{ gce_project_id }}"
+        state: 'absent'
+        instance_names: "{{ groups['oo_hosts_to_terminate'] }}"
+        disks: "{{ groups['oo_hosts_to_terminate'] }}"
+      register: gce
+
+    - debug: var=gce
+
+#    - name: Remove disks of instances
+#      gce_pd:
+#        service_account_email: "{{ gce_service_account_email }}"
+#        pem_file: "{{ gce_pem_file }}"
+#        project_id: "{{ gce_project_id }}"
+#        name: "{{ item }}"
+#        state: deleted
+#      with_items: gce.instance_names
+
diff --git a/playbooks/gce/os3-minion/terminate.yml b/playbooks/gce/os3-minion/terminate.yml
new file mode 100644
index 000000000..d31f175e1
--- /dev/null
+++ b/playbooks/gce/os3-minion/terminate.yml
@@ -0,0 +1,39 @@
+- name: "populate oo_hosts_to_terminate host group if needed"
+  hosts: localhost
+  gather_facts: no
+  tasks:
+    - debug: var=oo_host_group_exp
+
+    - name: Evaluate oo_host_group_exp if it's set
+      add_host: "name={{ item }} groups=oo_hosts_to_terminate"
+      with_items: "{{ oo_host_group_exp | default('') }}"
+      when: oo_host_group_exp is defined
+
+    - debug: msg="{{ groups['oo_hosts_to_terminate'] }}"
+
+
+- name: Terminate instances
+  hosts: localhost
+  connection: local
+  tasks:
+    - name: Terminate minion instances
+      gce:
+        service_account_email: "{{ gce_service_account_email }}"
+        pem_file: "{{ gce_pem_file }}"
+        project_id: "{{ gce_project_id }}"
+        state: 'absent'
+        instance_names: "{{ groups['oo_hosts_to_terminate'] }}"
+        disks: "{{ groups['oo_hosts_to_terminate'] }}"
+      register: gce
+
+    - debug: var=gce
+
+#    - name: Remove disks of instances
+#      gce_pd:
+#        service_account_email: "{{ gce_service_account_email }}"
+#        pem_file: "{{ gce_pem_file }}"
+#        project_id: "{{ gce_project_id }}"
+#        name: "{{ item }}"
+#        state: deleted
+#      with_items: gce.instance_names
+
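Both terminate playbooks share the same two-play layout: the first play runs on localhost and evaluates oo_host_group_exp (supplied by cloud.rb through --extra-vars) to populate the oo_hosts_to_terminate group, and the second play passes that group to the gce module with state 'absent', leaving the gce_pd disk cleanup commented out for now. For reference, the command cloud.rb effectively runs for a minion terminate looks roughly like the sketch below; the extra-vars path stands in for the Tempfile the Ruby helper writes, and verbosity flags are omitted:

    # oo_host_group_exp and oo_env are serialized to a JSON temp file and passed
    # by reference to avoid shell quoting problems (see run_playbook above).
    ansible-playbook -i inventory/gce/gce.py \
        playbooks/gce/os3-minion/terminate.yml \
        --extra-vars '@/path/to/extra_vars.json'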