  1. #!/usr/bin/env python2
  2. '''
  3. Fetch and combine multiple ec2 account settings into a single
  4. json hash.
  5. '''
  6. # vim: expandtab:tabstop=4:shiftwidth=4
  7. from time import time
  8. import argparse
  9. import yaml
  10. import os
  11. import subprocess
  12. import json
  13. import errno
  14. import fcntl
  15. import tempfile
  16. import copy
  17. CONFIG_FILE_NAME = 'multi_ec2.yaml'
  18. DEFAULT_CACHE_PATH = os.path.expanduser('~/.ansible/tmp/multi_ec2_inventory.cache')
  19. class MultiEc2(object):
  20. '''
  21. MultiEc2 class:
  22. Opens a yaml config file and reads aws credentials.
  23. Stores a json hash of resources in result.
  24. '''
  25. def __init__(self, args=None):
  26. # Allow args to be passed when called as a library
  27. if not args:
  28. self.args = {}
  29. else:
  30. self.args = args
  31. self.cache_path = DEFAULT_CACHE_PATH
  32. self.config = None
  33. self.all_ec2_results = {}
  34. self.result = {}
  35. self.file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)))
  36. same_dir_config_file = os.path.join(self.file_path, CONFIG_FILE_NAME)
  37. etc_dir_config_file = os.path.join(os.path.sep, 'etc', 'ansible', CONFIG_FILE_NAME)
  38. # Prefer a file in the same directory, fall back to a file in etc
  39. if os.path.isfile(same_dir_config_file):
  40. self.config_file = same_dir_config_file
  41. elif os.path.isfile(etc_dir_config_file):
  42. self.config_file = etc_dir_config_file
  43. else:
  44. self.config_file = None # expect env vars
  45. def run(self):
  46. '''This method checks to see if the local
  47. cache is valid for the inventory.
  48. if the cache is valid; return cache
  49. else the credentials are loaded from multi_ec2.yaml or from the env
  50. and we attempt to get the inventory from the provider specified.
  51. '''
  52. # load yaml
  53. if self.config_file and os.path.isfile(self.config_file):
  54. self.config = self.load_yaml_config()
  55. elif os.environ.has_key("AWS_ACCESS_KEY_ID") and \
  56. os.environ.has_key("AWS_SECRET_ACCESS_KEY"):
  57. # Build a default config
  58. self.config = {}
  59. self.config['accounts'] = [
  60. {
  61. 'name': 'default',
  62. 'cache_location': DEFAULT_CACHE_PATH,
  63. 'provider': 'aws/hosts/ec2.py',
  64. 'env_vars': {
  65. 'AWS_ACCESS_KEY_ID': os.environ["AWS_ACCESS_KEY_ID"],
  66. 'AWS_SECRET_ACCESS_KEY': os.environ["AWS_SECRET_ACCESS_KEY"],
  67. }
  68. },
  69. ]
  70. self.config['cache_max_age'] = 0
  71. else:
  72. raise RuntimeError("Could not find valid ec2 credentials in the environment.")
  73. if self.config.has_key('cache_location'):
  74. self.cache_path = self.config['cache_location']
  75. if self.args.get('refresh_cache', None):
  76. self.get_inventory()
  77. self.write_to_cache()
  78. # if its a host query, fetch and do not cache
  79. elif self.args.get('host', None):
  80. self.get_inventory()
  81. elif not self.is_cache_valid():
  82. # go fetch the inventories and cache them if cache is expired
  83. self.get_inventory()
  84. self.write_to_cache()
  85. else:
  86. # get data from disk
  87. self.get_inventory_from_cache()
  88. def load_yaml_config(self, conf_file=None):
  89. """Load a yaml config file with credentials to query the
  90. respective cloud for inventory.
  91. """
  92. config = None
  93. if not conf_file:
  94. conf_file = self.config_file
  95. with open(conf_file) as conf:
  96. config = yaml.safe_load(conf)
  97. return config
  98. def get_provider_tags(self, provider, env=None):
  99. """Call <provider> and query all of the tags that are usuable
  100. by ansible. If environment is empty use the default env.
  101. """
  102. if not env:
  103. env = os.environ
  104. # Allow relatively path'd providers in config file
  105. if os.path.isfile(os.path.join(self.file_path, provider)):
  106. provider = os.path.join(self.file_path, provider)
  107. # check to see if provider exists
  108. if not os.path.isfile(provider) or not os.access(provider, os.X_OK):
  109. raise RuntimeError("Problem with the provider. Please check path " \
  110. "and that it is executable. (%s)" % provider)
  111. cmds = [provider]
  112. if self.args.get('host', None):
  113. cmds.append("--host")
  114. cmds.append(self.args.get('host', None))
  115. else:
  116. cmds.append('--list')
  117. cmds.append('--refresh-cache')
  118. return subprocess.Popen(cmds, stderr=subprocess.PIPE, \
  119. stdout=subprocess.PIPE, env=env)
  120. @staticmethod
  121. def generate_config(config_data):
  122. """Generate the ec2.ini file in as a secure temp file.
  123. Once generated, pass it to the ec2.py as an environment variable.
  124. """
  125. fildes, tmp_file_path = tempfile.mkstemp(prefix='multi_ec2.ini.')
  126. for section, values in config_data.items():
  127. os.write(fildes, "[%s]\n" % section)
  128. for option, value in values.items():
  129. os.write(fildes, "%s = %s\n" % (option, value))
  130. os.close(fildes)
  131. return tmp_file_path
  132. def run_provider(self):
  133. '''Setup the provider call with proper variables
  134. and call self.get_provider_tags.
  135. '''
  136. try:
  137. all_results = []
  138. tmp_file_paths = []
  139. processes = {}
  140. for account in self.config['accounts']:
  141. env = account['env_vars']
  142. if account.has_key('provider_config'):
  143. tmp_file_paths.append(MultiEc2.generate_config(account['provider_config']))
  144. env['EC2_INI_PATH'] = tmp_file_paths[-1]
  145. name = account['name']
  146. provider = account['provider']
  147. processes[name] = self.get_provider_tags(provider, env)
  148. # for each process collect stdout when its available
  149. for name, process in processes.items():
  150. out, err = process.communicate()
  151. all_results.append({
  152. "name": name,
  153. "out": out.strip(),
  154. "err": err.strip(),
  155. "code": process.returncode
  156. })
  157. finally:
  158. # Clean up the mkstemp file
  159. for tmp_file in tmp_file_paths:
  160. os.unlink(tmp_file)
  161. return all_results
  162. def get_inventory(self):
  163. """Create the subprocess to fetch tags from a provider.
  164. Host query:
  165. Query to return a specific host. If > 1 queries have
  166. results then fail.
  167. List query:
  168. Query all of the different accounts for their tags. Once completed
  169. store all of their results into one merged updated hash.
  170. """
  171. provider_results = self.run_provider()
  172. # process --host results
  173. # For any 0 result, return it
  174. if self.args.get('host', None):
  175. count = 0
  176. for results in provider_results:
  177. if results['code'] == 0 and results['err'] == '' and results['out'] != '{}':
  178. self.result = json.loads(results['out'])
  179. count += 1
  180. if count > 1:
  181. raise RuntimeError("Found > 1 results for --host %s. \
  182. This is an invalid state." % self.args.get('host', None))
  183. # process --list results
  184. else:
  185. # For any non-zero, raise an error on it
  186. for result in provider_results:
  187. if result['code'] != 0:
  188. err_msg = ['\nProblem fetching account: {name}',
  189. 'Error Code: {code}',
  190. 'StdErr: {err}',
  191. 'Stdout: {out}',
  192. ]
  193. raise RuntimeError('\n'.join(err_msg).format(**result))
  194. else:
  195. self.all_ec2_results[result['name']] = json.loads(result['out'])
  196. # Check if user wants extra vars in yaml by
  197. # having hostvars and all_group defined
  198. for acc_config in self.config['accounts']:
  199. self.apply_account_config(acc_config)
  200. # Build results by merging all dictionaries
  201. values = self.all_ec2_results.values()
  202. values.insert(0, self.result)
  203. for result in values:
  204. MultiEc2.merge_destructively(self.result, result)
  205. def apply_account_config(self, acc_config):
  206. ''' Apply account config settings
  207. '''
  208. if not acc_config.has_key('hostvars') and not acc_config.has_key('all_group'):
  209. return
  210. results = self.all_ec2_results[acc_config['name']]
  211. # Update each hostvar with the newly desired key: value
  212. for host_property, value in acc_config['hostvars'].items():
  213. # Verify the account results look sane
  214. # by checking for these keys ('_meta' and 'hostvars' exist)
  215. if results.has_key('_meta') and results['_meta'].has_key('hostvars'):
  216. for data in results['_meta']['hostvars'].values():
  217. data[str(host_property)] = str(value)
  218. # Add this group
  219. if results.has_key(acc_config['all_group']):
  220. results["%s_%s" % (host_property, value)] = \
  221. copy.copy(results[acc_config['all_group']])
  222. # store the results back into all_ec2_results
  223. self.all_ec2_results[acc_config['name']] = results
  224. @staticmethod
  225. def merge_destructively(input_a, input_b):
  226. "merges b into input_a"
  227. for key in input_b:
  228. if key in input_a:
  229. if isinstance(input_a[key], dict) and isinstance(input_b[key], dict):
  230. MultiEc2.merge_destructively(input_a[key], input_b[key])
  231. elif input_a[key] == input_b[key]:
  232. pass # same leaf value
  233. # both lists so add each element in b to a if it does ! exist
  234. elif isinstance(input_a[key], list) and isinstance(input_b[key], list):
  235. for result in input_b[key]:
  236. if result not in input_a[key]:
  237. input_a[key].append(result)
  238. # a is a list and not b
  239. elif isinstance(input_a[key], list):
  240. if input_b[key] not in input_a[key]:
  241. input_a[key].append(input_b[key])
  242. elif isinstance(input_b[key], list):
  243. input_a[key] = [input_a[key]] + [k for k in input_b[key] if k != input_a[key]]
  244. else:
  245. input_a[key] = [input_a[key], input_b[key]]
  246. else:
  247. input_a[key] = input_b[key]
  248. return input_a
  249. def is_cache_valid(self):
  250. ''' Determines if the cache files have expired, or if it is still valid '''
  251. if os.path.isfile(self.cache_path):
  252. mod_time = os.path.getmtime(self.cache_path)
  253. current_time = time()
  254. if (mod_time + self.config['cache_max_age']) > current_time:
  255. return True
  256. return False
  257. def parse_cli_args(self):
  258. ''' Command line argument processing '''
  259. parser = argparse.ArgumentParser(
  260. description='Produce an Ansible Inventory file based on a provider')
  261. parser.add_argument('--refresh-cache', action='store_true', default=False,
  262. help='Fetch cached only instances (default: False)')
  263. parser.add_argument('--list', action='store_true', default=True,
  264. help='List instances (default: True)')
  265. parser.add_argument('--host', action='store', default=False,
  266. help='Get all the variables about a specific instance')
  267. self.args = parser.parse_args().__dict__
  268. def write_to_cache(self):
  269. ''' Writes data in JSON format to a file '''
  270. # if it does not exist, try and create it.
  271. if not os.path.isfile(self.cache_path):
  272. path = os.path.dirname(self.cache_path)
  273. try:
  274. os.makedirs(path)
  275. except OSError as exc:
  276. if exc.errno != errno.EEXIST or not os.path.isdir(path):
  277. raise
  278. json_data = MultiEc2.json_format_dict(self.result, True)
  279. with open(self.cache_path, 'w') as cache:
  280. try:
  281. fcntl.flock(cache, fcntl.LOCK_EX)
  282. cache.write(json_data)
  283. finally:
  284. fcntl.flock(cache, fcntl.LOCK_UN)
  285. def get_inventory_from_cache(self):
  286. ''' Reads the inventory from the cache file and returns it as a JSON
  287. object '''
  288. if not os.path.isfile(self.cache_path):
  289. return None
  290. with open(self.cache_path, 'r') as cache:
  291. self.result = json.loads(cache.read())
  292. return True
  293. @classmethod
  294. def json_format_dict(cls, data, pretty=False):
  295. ''' Converts a dict to a JSON object and dumps it as a formatted
  296. string '''
  297. if pretty:
  298. return json.dumps(data, sort_keys=True, indent=2)
  299. else:
  300. return json.dumps(data)
  301. def result_str(self):
  302. '''Return cache string stored in self.result'''
  303. return self.json_format_dict(self.result, True)
  304. if __name__ == "__main__":
  305. MEC2 = MultiEc2()
  306. MEC2.parse_cli_args()
  307. MEC2.run()
  308. print MEC2.result_str()