  1. #!/usr/bin/env python2
  2. '''
  3. Fetch and combine multiple ec2 account settings into a single
  4. json hash.
  5. '''
  6. # vim: expandtab:tabstop=4:shiftwidth=4
  7. from time import time
  8. import argparse
  9. import yaml
  10. import os
  11. import subprocess
  12. import json
  13. import errno
  14. import fcntl
  15. CONFIG_FILE_NAME = 'multi_ec2.yaml'
  16. DEFAULT_CACHE_PATH = os.path.expanduser('~/.ansible/tmp/multi_ec2_inventory.cache')
  17. class MultiEc2(object):
  18. '''
  19. MultiEc2 class:
  20. Opens a yaml config file and reads aws credentials.
  21. Stores a json hash of resources in result.
  22. '''
  23. def __init__(self):
  24. self.args = None
  25. self.config = None
  26. self.all_ec2_results = {}
  27. self.result = {}
  28. self.file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)))
  29. same_dir_config_file = os.path.join(self.file_path, CONFIG_FILE_NAME)
  30. etc_dir_config_file = os.path.join(os.path.sep, 'etc', 'ansible', CONFIG_FILE_NAME)
  31. # Prefer a file in the same directory, fall back to a file in etc
  32. if os.path.isfile(same_dir_config_file):
  33. self.config_file = same_dir_config_file
  34. elif os.path.isfile(etc_dir_config_file):
  35. self.config_file = etc_dir_config_file
  36. else:
  37. self.config_file = None # expect env vars
  38. self.parse_cli_args()
  39. # load yaml
  40. if self.config_file and os.path.isfile(self.config_file):
  41. self.config = self.load_yaml_config()
  42. elif os.environ.has_key("AWS_ACCESS_KEY_ID") and \
  43. os.environ.has_key("AWS_SECRET_ACCESS_KEY"):
  44. # Build a default config
  45. self.config = {}
  46. self.config['accounts'] = [
  47. {
  48. 'name': 'default',
  49. 'cache_location': DEFAULT_CACHE_PATH,
  50. 'provider': 'aws/hosts/ec2.py',
  51. 'env_vars': {
  52. 'AWS_ACCESS_KEY_ID': os.environ["AWS_ACCESS_KEY_ID"],
  53. 'AWS_SECRET_ACCESS_KEY': os.environ["AWS_SECRET_ACCESS_KEY"],
  54. }
  55. },
  56. ]
  57. self.config['cache_max_age'] = 0
  58. else:
  59. raise RuntimeError("Could not find valid ec2 credentials in the environment.")
  60. # Set the default cache path but if its defined we'll assign it.
  61. self.cache_path = DEFAULT_CACHE_PATH
  62. if self.config.has_key('cache_location'):
  63. self.cache_path = self.config['cache_location']
  64. if self.args.refresh_cache:
  65. self.get_inventory()
  66. self.write_to_cache()
  67. # if its a host query, fetch and do not cache
  68. elif self.args.host:
  69. self.get_inventory()
  70. elif not self.is_cache_valid():
  71. # go fetch the inventories and cache them if cache is expired
  72. self.get_inventory()
  73. self.write_to_cache()
  74. else:
  75. # get data from disk
  76. self.get_inventory_from_cache()
  77. def load_yaml_config(self, conf_file=None):
  78. """Load a yaml config file with credentials to query the
  79. respective cloud for inventory.
  80. """
  81. config = None
  82. if not conf_file:
  83. conf_file = self.config_file
  84. with open(conf_file) as conf:
  85. config = yaml.safe_load(conf)
  86. return config
  87. def get_provider_tags(self, provider, env=None):
  88. """Call <provider> and query all of the tags that are usuable
  89. by ansible. If environment is empty use the default env.
  90. """
  91. if not env:
  92. env = os.environ
  93. # Allow relatively path'd providers in config file
  94. if os.path.isfile(os.path.join(self.file_path, provider)):
  95. provider = os.path.join(self.file_path, provider)
  96. # check to see if provider exists
  97. if not os.path.isfile(provider) or not os.access(provider, os.X_OK):
  98. raise RuntimeError("Problem with the provider. Please check path " \
  99. "and that it is executable. (%s)" % provider)
  100. cmds = [provider]
  101. if self.args.host:
  102. cmds.append("--host")
  103. cmds.append(self.args.host)
  104. else:
  105. cmds.append('--list')
  106. cmds.append('--refresh-cache')
  107. return subprocess.Popen(cmds, stderr=subprocess.PIPE, \
  108. stdout=subprocess.PIPE, env=env)
  109. def get_inventory(self):
  110. """Create the subprocess to fetch tags from a provider.
  111. Host query:
  112. Query to return a specific host. If > 1 queries have
  113. results then fail.
  114. List query:
  115. Query all of the different accounts for their tags. Once completed
  116. store all of their results into one merged updated hash.
  117. """
  118. processes = {}
  119. for account in self.config['accounts']:
  120. env = account['env_vars']
  121. name = account['name']
  122. provider = account['provider']
  123. processes[name] = self.get_provider_tags(provider, env)
  124. # for each process collect stdout when its available
  125. all_results = []
  126. for name, process in processes.items():
  127. out, err = process.communicate()
  128. all_results.append({
  129. "name": name,
  130. "out": out.strip(),
  131. "err": err.strip(),
  132. "code": process.returncode
  133. })
  134. # process --host results
  135. if not self.args.host:
  136. # For any non-zero, raise an error on it
  137. for result in all_results:
  138. if result['code'] != 0:
  139. raise RuntimeError(result['err'])
  140. else:
  141. self.all_ec2_results[result['name']] = json.loads(result['out'])
  142. values = self.all_ec2_results.values()
  143. values.insert(0, self.result)
  144. for result in values:
  145. MultiEc2.merge_destructively(self.result, result)
  146. else:
  147. # For any 0 result, return it
  148. count = 0
  149. for results in all_results:
  150. if results['code'] == 0 and results['err'] == '' and results['out'] != '{}':
  151. self.result = json.loads(out)
  152. count += 1
  153. if count > 1:
  154. raise RuntimeError("Found > 1 results for --host %s. \
  155. This is an invalid state." % self.args.host)
  156. @staticmethod
  157. def merge_destructively(input_a, input_b):
  158. "merges b into input_a"
  159. for key in input_b:
  160. if key in input_a:
  161. if isinstance(input_a[key], dict) and isinstance(input_b[key], dict):
  162. MultiEc2.merge_destructively(input_a[key], input_b[key])
  163. elif input_a[key] == input_b[key]:
  164. pass # same leaf value
  165. # both lists so add each element in b to a if it does ! exist
  166. elif isinstance(input_a[key], list) and isinstance(input_b[key], list):
  167. for result in input_b[key]:
  168. if result not in input_a[key]:
  169. input_a[key].input_append(result)
  170. # a is a list and not b
  171. elif isinstance(input_a[key], list):
  172. if input_b[key] not in input_a[key]:
  173. input_a[key].append(input_b[key])
  174. elif isinstance(input_b[key], list):
  175. input_a[key] = [input_a[key]] + [k for k in input_b[key] if k != input_a[key]]
  176. else:
  177. input_a[key] = [input_a[key], input_b[key]]
  178. else:
  179. input_a[key] = input_b[key]
  180. return input_a
  181. def is_cache_valid(self):
  182. ''' Determines if the cache files have expired, or if it is still valid '''
  183. if os.path.isfile(self.cache_path):
  184. mod_time = os.path.getmtime(self.cache_path)
  185. current_time = time()
  186. if (mod_time + self.config['cache_max_age']) > current_time:
  187. return True
  188. return False
  189. def parse_cli_args(self):
  190. ''' Command line argument processing '''
  191. parser = argparse.ArgumentParser(
  192. description='Produce an Ansible Inventory file based on a provider')
  193. parser.add_argument('--refresh-cache', action='store_true', default=False,
  194. help='Fetch cached only instances (default: False)')
  195. parser.add_argument('--list', action='store_true', default=True,
  196. help='List instances (default: True)')
  197. parser.add_argument('--host', action='store', default=False,
  198. help='Get all the variables about a specific instance')
  199. self.args = parser.parse_args()
  200. def write_to_cache(self):
  201. ''' Writes data in JSON format to a file '''
  202. # if it does not exist, try and create it.
  203. if not os.path.isfile(self.cache_path):
  204. path = os.path.dirname(self.cache_path)
  205. try:
  206. os.makedirs(path)
  207. except OSError as exc:
  208. if exc.errno != errno.EEXIST or not os.path.isdir(path):
  209. raise
  210. json_data = MultiEc2.json_format_dict(self.result, True)
  211. with open(self.cache_path, 'w') as cache:
  212. try:
  213. fcntl.flock(cache, fcntl.LOCK_EX)
  214. cache.write(json_data)
  215. finally:
  216. fcntl.flock(cache, fcntl.LOCK_UN)
  217. def get_inventory_from_cache(self):
  218. ''' Reads the inventory from the cache file and returns it as a JSON
  219. object '''
  220. if not os.path.isfile(self.cache_path):
  221. return None
  222. with open(self.cache_path, 'r') as cache:
  223. self.result = json.loads(cache.read())
  224. return True
  225. @classmethod
  226. def json_format_dict(cls, data, pretty=False):
  227. ''' Converts a dict to a JSON object and dumps it as a formatted
  228. string '''
  229. if pretty:
  230. return json.dumps(data, sort_keys=True, indent=2)
  231. else:
  232. return json.dumps(data)
  233. def result_str(self):
  234. '''Return cache string stored in self.result'''
  235. return self.json_format_dict(self.result, True)
  236. if __name__ == "__main__":
  237. print MultiEc2().result_str()