multi_ec2.py 9.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257
#!/usr/bin/env python2
'''
    Fetch and combine multiple ec2 account settings into a single
    json hash.
'''
# vim: expandtab:tabstop=4:shiftwidth=4
from time import time
import argparse
import yaml
import os
import subprocess
import json

# Name of the yaml config file holding the aws account credentials.
# Looked for next to this script first, then under /etc/ansible/.
CONFIG_FILE_NAME = 'multi_ec2.yaml'
  14. class MultiEc2(object):
  15. '''
  16. MultiEc2 class:
  17. Opens a yaml config file and reads aws credentials.
  18. Stores a json hash of resources in result.
  19. '''
  20. def __init__(self):
  21. self.args = None
  22. self.config = None
  23. self.all_ec2_results = {}
  24. self.result = {}
  25. self.cache_path = os.path.expanduser('~/.ansible/tmp/multi_ec2_inventory.cache')
  26. self.file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)))
  27. same_dir_config_file = os.path.join(self.file_path, CONFIG_FILE_NAME)
  28. etc_dir_config_file = os.path.join(os.path.sep, 'etc', 'ansible', CONFIG_FILE_NAME)
  29. # Prefer a file in the same directory, fall back to a file in etc
  30. if os.path.isfile(same_dir_config_file):
  31. self.config_file = same_dir_config_file
  32. elif os.path.isfile(etc_dir_config_file):
  33. self.config_file = etc_dir_config_file
  34. else:
  35. self.config_file = None # expect env vars
  36. self.parse_cli_args()
  37. # load yaml
  38. if self.config_file and os.path.isfile(self.config_file):
  39. self.config = self.load_yaml_config()
  40. elif os.environ.has_key("AWS_ACCESS_KEY_ID") and \
  41. os.environ.has_key("AWS_SECRET_ACCESS_KEY"):
  42. self.config = {}
  43. self.config['accounts'] = [
  44. {
  45. 'name': 'default',
  46. 'provider': 'aws/hosts/ec2.py',
  47. 'env_vars': {
  48. 'AWS_ACCESS_KEY_ID': os.environ["AWS_ACCESS_KEY_ID"],
  49. 'AWS_SECRET_ACCESS_KEY': os.environ["AWS_SECRET_ACCESS_KEY"],
  50. }
  51. },
  52. ]
  53. self.config['cache_max_age'] = 0
  54. else:
  55. raise RuntimeError("Could not find valid ec2 credentials in the environment.")
  56. if self.args.refresh_cache:
  57. self.get_inventory()
  58. self.write_to_cache()
  59. # if its a host query, fetch and do not cache
  60. elif self.args.host:
  61. self.get_inventory()
  62. elif not self.is_cache_valid():
  63. # go fetch the inventories and cache them if cache is expired
  64. self.get_inventory()
  65. self.write_to_cache()
  66. else:
  67. # get data from disk
  68. self.get_inventory_from_cache()
  69. def load_yaml_config(self, conf_file=None):
  70. """Load a yaml config file with credentials to query the
  71. respective cloud for inventory.
  72. """
  73. config = None
  74. if not conf_file:
  75. conf_file = self.config_file
  76. with open(conf_file) as conf:
  77. config = yaml.safe_load(conf)
  78. return config
  79. def get_provider_tags(self, provider, env=None):
  80. """Call <provider> and query all of the tags that are usuable
  81. by ansible. If environment is empty use the default env.
  82. """
  83. if not env:
  84. env = os.environ
  85. # Allow relatively path'd providers in config file
  86. if os.path.isfile(os.path.join(self.file_path, provider)):
  87. provider = os.path.join(self.file_path, provider)
  88. # check to see if provider exists
  89. if not os.path.isfile(provider) or not os.access(provider, os.X_OK):
  90. raise RuntimeError("Problem with the provider. Please check path " \
  91. "and that it is executable. (%s)" % provider)
  92. cmds = [provider]
  93. if self.args.host:
  94. cmds.append("--host")
  95. cmds.append(self.args.host)
  96. else:
  97. cmds.append('--list')
  98. cmds.append('--refresh-cache')
  99. return subprocess.Popen(cmds, stderr=subprocess.PIPE, \
  100. stdout=subprocess.PIPE, env=env)
  101. def get_inventory(self):
  102. """Create the subprocess to fetch tags from a provider.
  103. Host query:
  104. Query to return a specific host. If > 1 queries have
  105. results then fail.
  106. List query:
  107. Query all of the different accounts for their tags. Once completed
  108. store all of their results into one merged updated hash.
  109. """
  110. processes = {}
  111. for account in self.config['accounts']:
  112. env = account['env_vars']
  113. name = account['name']
  114. provider = account['provider']
  115. processes[name] = self.get_provider_tags(provider, env)
  116. # for each process collect stdout when its available
  117. all_results = []
  118. for name, process in processes.items():
  119. out, err = process.communicate()
  120. all_results.append({
  121. "name": name,
  122. "out": out.strip(),
  123. "err": err.strip(),
  124. "code": process.returncode
  125. })
  126. # process --host results
  127. if not self.args.host:
  128. # For any non-zero, raise an error on it
  129. for result in all_results:
  130. if result['code'] != 0:
  131. raise RuntimeError(result['err'])
  132. else:
  133. self.all_ec2_results[result['name']] = json.loads(result['out'])
  134. values = self.all_ec2_results.values()
  135. values.insert(0, self.result)
  136. for result in values:
  137. MultiEc2.merge_destructively(self.result, result)
  138. else:
  139. # For any 0 result, return it
  140. count = 0
  141. for results in all_results:
  142. if results['code'] == 0 and results['err'] == '' and results['out'] != '{}':
  143. self.result = json.loads(out)
  144. count += 1
  145. if count > 1:
  146. raise RuntimeError("Found > 1 results for --host %s. \
  147. This is an invalid state." % self.args.host)
  148. @staticmethod
  149. def merge_destructively(input_a, input_b):
  150. "merges b into input_a"
  151. for key in input_b:
  152. if key in input_a:
  153. if isinstance(input_a[key], dict) and isinstance(input_b[key], dict):
  154. MultiEc2.merge_destructively(input_a[key], input_b[key])
  155. elif input_a[key] == input_b[key]:
  156. pass # same leaf value
  157. # both lists so add each element in b to a if it does ! exist
  158. elif isinstance(input_a[key], list) and isinstance(input_b[key], list):
  159. for result in input_b[key]:
  160. if result not in input_a[key]:
  161. input_a[key].input_append(result)
  162. # a is a list and not b
  163. elif isinstance(input_a[key], list):
  164. if input_b[key] not in input_a[key]:
  165. input_a[key].append(input_b[key])
  166. elif isinstance(input_b[key], list):
  167. input_a[key] = [input_a[key]] + [k for k in input_b[key] if k != input_a[key]]
  168. else:
  169. input_a[key] = [input_a[key], input_b[key]]
  170. else:
  171. input_a[key] = input_b[key]
  172. return input_a
  173. def is_cache_valid(self):
  174. ''' Determines if the cache files have expired, or if it is still valid '''
  175. if os.path.isfile(self.cache_path):
  176. mod_time = os.path.getmtime(self.cache_path)
  177. current_time = time()
  178. if (mod_time + self.config['cache_max_age']) > current_time:
  179. return True
  180. return False
  181. def parse_cli_args(self):
  182. ''' Command line argument processing '''
  183. parser = argparse.ArgumentParser(
  184. description='Produce an Ansible Inventory file based on a provider')
  185. parser.add_argument('--refresh-cache', action='store_true', default=False,
  186. help='Fetch cached only instances (default: False)')
  187. parser.add_argument('--list', action='store_true', default=True,
  188. help='List instances (default: True)')
  189. parser.add_argument('--host', action='store', default=False,
  190. help='Get all the variables about a specific instance')
  191. self.args = parser.parse_args()
  192. def write_to_cache(self):
  193. ''' Writes data in JSON format to a file '''
  194. json_data = MultiEc2.json_format_dict(self.result, True)
  195. with open(self.cache_path, 'w') as cache:
  196. cache.write(json_data)
  197. def get_inventory_from_cache(self):
  198. ''' Reads the inventory from the cache file and returns it as a JSON
  199. object '''
  200. if not os.path.isfile(self.cache_path):
  201. return None
  202. with open(self.cache_path, 'r') as cache:
  203. self.result = json.loads(cache.read())
  204. return True
  205. @classmethod
  206. def json_format_dict(cls, data, pretty=False):
  207. ''' Converts a dict to a JSON object and dumps it as a formatted
  208. string '''
  209. if pretty:
  210. return json.dumps(data, sort_keys=True, indent=2)
  211. else:
  212. return json.dumps(data)
  213. def result_str(self):
  214. '''Return cache string stored in self.result'''
  215. return self.json_format_dict(self.result, True)
  216. if __name__ == "__main__":
  217. print MultiEc2().result_str()