multi_ec2.py 6.1 KB

#!/usr/bin/env python

from time import time
import argparse
import yaml
import os
import sys
import pdb
import subprocess
import json
import pprint


class MultiEc2(object):

    def __init__(self):
        self.config = None
        self.results = {}
        self.result = {}
        self.cache_path_cache = os.path.expanduser('~/.ansible/tmp/meta-inventory.cache')

        self.parse_cli_args()

        # load yaml
        self.load_yaml_config()

        # if it's a host query, fetch and do not cache
        if self.args.host:
            self.get_inventory()
        elif not self.is_cache_valid():
            # go fetch the inventories and cache them if the cache is expired
            self.get_inventory()
            self.write_to_cache()
        else:
            # get data from disk
            self.get_inventory_from_cache()
    def load_yaml_config(self, conf_file=os.path.join(os.getcwd(), 'multi_ec2.yaml')):
        """Load a yaml config file with credentials to query the
           respective cloud for inventory.
        """
        with open(conf_file) as conf:
            self.config = yaml.safe_load(conf)
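
    # A minimal sketch of the multi_ec2.yaml file this class expects, inferred
    # from the keys read elsewhere in this script ('cache_max_age', 'clouds',
    # and each cloud's 'name', 'provider', 'env_vars'). The provider path and
    # credential variable names below are illustrative assumptions only:
    #
    #   cache_max_age: 300
    #   clouds:
    #     - name: aws_account_1
    #       provider: ec2.py          # inventory script located in the cwd
    #       env_vars:
    #         AWS_ACCESS_KEY_ID: xxxxxxxx
    #         AWS_SECRET_ACCESS_KEY: xxxxxxxx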
    def get_provider_tags(self, provider, env=None):
        """Call <provider> and query all of the tags that are usable
           by ansible. If environment is empty use the default env.
        """
        if not env:
            env = os.environ

        # check to see if the provider script exists in the current directory
        if not os.path.isfile(os.path.join(os.getcwd(), provider)):
            raise RuntimeError("Unknown provider: %s" % provider)

        cmds = [provider]
        if self.args.host:
            cmds.append("--host")
            cmds.append(self.args.host)
        else:
            cmds.append('--list')

        cmds.append('--refresh-cache')

        return subprocess.Popen(cmds, stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE, env=env)
    def get_inventory(self):
        """Create the subprocess to fetch tags from a provider.

           Host query:
               Query to return a specific host. If > 1 queries have
               results then fail.

           List query:
               Query all of the different clouds for their tags. Once completed
               store all of their results into one merged updated hash.
        """
        # start one provider subprocess per configured cloud account
        processes = {}
        for account in self.config['clouds']:
            env = account['env_vars']
            name = account['name']
            provider = account['provider']
            processes[name] = self.get_provider_tags(provider, env)

        # for each process collect stdout when it's available
        all_results = []
        for name, process in processes.items():
            out, err = process.communicate()
            all_results.append({
                "name": name,
                "out": out.strip(),
                "err": err.strip(),
                "code": process.returncode
            })

        if not self.args.host:
            # For any non-zero return code, raise an error; otherwise record the result
            for result in all_results:
                if result['code'] != 0:
                    raise RuntimeError(result['err'])
                else:
                    self.results[result['name']] = json.loads(result['out'])
            self.merge()
        else:
            # For a host query, exactly one provider should return a result
            count = 0
            for results in all_results:
                if results['code'] == 0 and results['err'] == '' and results['out'] != '{}':
                    self.result = json.loads(results['out'])
                    count += 1
                if count > 1:
                    raise RuntimeError("Found > 1 results for --host %s. "
                                       "This is an invalid state." % self.args.host)
    def merge(self):
        """Merge the results into a single hash. Duplicate keys are placed
           into a list.
        """
        for name, cloud_result in self.results.items():
            for k, v in cloud_result.items():
                if k in self.result:
                    # key already seen; combine the values into a list
                    if isinstance(self.result[k], list):
                        self.result[k].append(v)
                    else:
                        self.result[k] = [self.result[k], v]
                else:
                    self.result[k] = [v]

        self.result = self.json_format_dict(self.result)
    def is_cache_valid(self):
        ''' Determines if the cache file has expired, or if it is still valid '''
        if os.path.isfile(self.cache_path_cache):
            mod_time = os.path.getmtime(self.cache_path_cache)
            current_time = time()
            if (mod_time + self.config['cache_max_age']) > current_time:
                return True

        return False
    def parse_cli_args(self):
        ''' Command line argument processing '''
        parser = argparse.ArgumentParser(
            description='Produce an Ansible Inventory file based on a provider')
        parser.add_argument('--list', action='store_true', default=True,
                            help='List instances (default: True)')
        parser.add_argument('--host', action='store',
                            help='Get all the variables about a specific instance')
        self.args = parser.parse_args()
    def write_to_cache(self):
        ''' Writes data in JSON format to a file '''
        json_data = self.json_format_dict(self.result, True)
        with open(self.cache_path_cache, 'w') as cache:
            cache.write(json_data)

    def get_inventory_from_cache(self):
        ''' Reads the inventory from the cache file and returns it as a JSON
            object '''
        with open(self.cache_path_cache, 'r') as cache:
            self.result = json.loads(cache.read())

    def json_format_dict(self, data, pretty=False):
        ''' Converts a dict to a JSON object and dumps it as a formatted
            string '''
        if pretty:
            return json.dumps(data, sort_keys=True, indent=2)
        else:
            return json.dumps(data)

if __name__ == "__main__":
    mi = MultiEc2()
    pp = pprint.PrettyPrinter(indent=2)
    pp.pprint(mi.result)
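
# Rough usage sketch (assumed, not taken from this file): run ./multi_ec2.py --list
# to print the merged inventory for all configured clouds, ./multi_ec2.py --host <hostname>
# to fetch the variables for a single instance, or hand the script to Ansible as a
# dynamic inventory source, e.g. ansible -i multi_ec2.py all -m ping.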