pep8 fixes for contrib (#24344)

Commit d3249e7875 by Matt Martz, 2017-05-09 16:38:08 -05:00 (committed via GitHub)
37 changed files with 326 additions and 380 deletions


@@ -53,6 +53,7 @@ except ImportError:
from ansible.module_utils.urls import open_url
def api_get(link, config):
try:
if link is None:
@@ -67,6 +68,7 @@ def api_get(link, config):
except:
return None
def save_cache(data, config):
''' saves item to cache '''
dpath = config.get('cache', 'cache_dir')
@@ -91,6 +93,7 @@ def get_cache(cache_item, config):
return inv
def cache_available(config):
''' checks if we have a 'fresh' cache available for item requested '''
@@ -105,11 +108,12 @@ def cache_available(config):
if config.has_option('cache', 'cache_max_age'):
maxage = config.get('cache', 'cache_max_age')
- if ((int(time.time()) - int(existing.st_mtime)) <= int(maxage)):
+ if (int(time.time()) - int(existing.st_mtime)) <= int(maxage):
return True
return False
def generate_inv_from_api(enterprise_entity, config):
try:
inventory['all'] = {}
@@ -119,7 +123,7 @@ def generate_inv_from_api(enterprise_entity,config):
inventory['_meta']['hostvars'] = {}
enterprise = api_get(enterprise_entity, config)
- vms_entity = next(link for link in (enterprise['links']) if (link['rel']=='virtualmachines'))
+ vms_entity = next(link for link in enterprise['links'] if link['rel'] == 'virtualmachines')
vms = api_get(vms_entity, config)
for vmcollection in vms['collection']:
for link in vmcollection['links']:
@@ -133,7 +137,7 @@ def generate_inv_from_api(enterprise_entity,config):
# From abiquo.ini: Only adding to inventory VMs with public IP
if config.getboolean('defaults', 'public_ip_only') is True:
for link in vmcollection['links']:
- if (link['type']=='application/vnd.abiquo.publicip+json' and link['rel']=='ip'):
+ if link['type'] == 'application/vnd.abiquo.publicip+json' and link['rel'] == 'ip':
vm_nic = link['title']
break
else:
@@ -166,7 +170,7 @@ def generate_inv_from_api(enterprise_entity,config):
inventory[vm_template]['children'] = []
inventory[vm_template]['hosts'] = []
if config.getboolean('defaults', 'get_metadata') is True:
- meta_entity = next(link for link in (vmcollection['links']) if (link['rel']=='metadata'))
+ meta_entity = next(link for link in vmcollection['links'] if link['rel'] == 'metadata')
try:
metadata = api_get(meta_entity, config)
if (config.getfloat("api", "version") >= 3.0):
@@ -189,6 +193,7 @@ def generate_inv_from_api(enterprise_entity,config):
# Return empty hosts output
return {'all': {'hosts': []}, '_meta': {'hostvars': {}}}
def get_inventory(enterprise, config):
''' Reads the inventory from cache or Abiquo api '''
@@ -202,6 +207,7 @@ def get_inventory(enterprise, config):
save_cache(inv, config)
return json.dumps(inv)
if __name__ == '__main__':
inventory = {}
enterprise = {}
@@ -215,7 +221,7 @@ if __name__ == '__main__':
try:
login = api_get(None, config)
- enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
+ enterprise = next(link for link in login['links'] if link['rel'] == 'enterprise')
except Exception as e:
enterprise = None
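The edits in this block drop redundant parentheses and put spaces around the comparisons inside the generator expressions passed to next(). A standalone sketch of the cleaned-up pattern, using made-up sample data rather than anything from the Abiquo API:

links = [
    {'rel': 'metadata', 'href': 'https://example.com/meta'},
    {'rel': 'virtualmachines', 'href': 'https://example.com/vms'},
]

# Before: next(link for link in (links) if (link['rel']=='virtualmachines'))
vms_entity = next(link for link in links if link['rel'] == 'virtualmachines')
print(vms_entity['href'])  # -> https://example.com/vms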


@@ -82,7 +82,6 @@ class LibcloudInventory(object):
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
@@ -95,7 +94,6 @@ class LibcloudInventory(object):
return False
def read_settings(self):
''' Reads the settings from the libcloud.ini file '''
@@ -146,7 +144,6 @@ class LibcloudInventory(object):
self.cache_path_index = cache_path + "/ansible-libcloud.index"
self.cache_max_age = config.getint('cache', 'cache_max_age')
def parse_cli_args(self):
'''
Command line argument processing
@@ -161,7 +158,6 @@ class LibcloudInventory(object):
help='Force refresh of cache by making API requests to libcloud supported providers (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
'''
Do API calls to a location, and save data in cache files
@@ -172,7 +168,6 @@ class LibcloudInventory(object):
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def get_nodes(self):
'''
Gets the list of all nodes
@@ -181,7 +176,6 @@ class LibcloudInventory(object):
for node in self.conn.list_nodes():
self.add_node(node)
def get_node(self, node_id):
'''
Gets details about a specific node
@@ -189,7 +183,6 @@ class LibcloudInventory(object):
return [node for node in self.conn.list_nodes() if node.id == node_id][0]
def add_node(self, node):
'''
Adds a node to the inventory and index, as long as it is
@@ -244,10 +237,10 @@ class LibcloudInventory(object):
# Need to load index from cache
self.load_index_from_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# host migh not exist anymore
return self.json_format_dict({}, True)
@@ -289,7 +282,6 @@ class LibcloudInventory(object):
return self.json_format_dict(instance_vars, True)
def push(self, my_dict, key, element):
'''
Pushed an element onto an array that may not have been defined in
@@ -301,7 +293,6 @@ class LibcloudInventory(object):
else:
my_dict[key] = [element]
def get_inventory_from_cache(self):
'''
Reads the inventory from the cache file and returns it as a JSON
@@ -312,7 +303,6 @@ class LibcloudInventory(object):
json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
'''
Reads the index from the cache file sets self.index
@@ -322,7 +312,6 @@ class LibcloudInventory(object):
json_index = cache.read()
self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
'''
Writes data in JSON format to a file
@@ -333,7 +322,6 @@ class LibcloudInventory(object):
cache.write(json_data)
cache.close()
def to_safe(self, word):
'''
Converts 'bad' characters in a string to underscores so they can be
@@ -342,7 +330,6 @@ class LibcloudInventory(object):
return re.sub("[^A-Za-z0-9\-]", "_", word)
def json_format_dict(self, data, pretty=False):
'''
Converts a dict to a JSON object and dumps it as a formatted
@@ -354,6 +341,7 @@ class LibcloudInventory(object):
else:
return json.dumps(data)
def main():
LibcloudInventory()
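The only non-whitespace edits in this block rewrite the membership tests; pycodestyle's E713 prefers 'x not in y' over 'not x in y'. A minimal, runnable illustration with invented names:

index = {'host-a': 0}
host = 'host-b'

# Before: if not host in index:
if host not in index:   # same truth table, but reads as a single operator
    print('%s is not cached' % host)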


@@ -278,10 +278,12 @@ Expected output format in Device mode
}
"""
def fail(msg):
sys.stderr.write("%s\n" % msg)
sys.exit(1)
class AosInventory(object):
def __init__(self):
@@ -496,7 +498,6 @@ class AosInventory(object):
except:
pass
def parse_cli_args(self):
""" Command line argument processing """


@@ -229,11 +229,11 @@ class CobblerInventory(object):
# Need to load index from cache
self.load_cache_from_cache()
- if not self.args.host in self.cache:
+ if self.args.host not in self.cache:
# try updating the cache
self.update_cache()
- if not self.args.host in self.cache:
+ if self.args.host not in self.cache:
# host might not exist anymore
return self.json_format_dict({}, True)


@@ -85,6 +85,7 @@ from six import iteritems
from ansible.module_utils.urls import open_url
class CollinsDefaults(object):
ASSETS_API_ENDPOINT = '%s/api/assets'
SPECIAL_ATTRIBUTES = set([
@@ -174,8 +175,7 @@ class CollinsInventory(object):
# Formats asset search query to locate assets matching attributes, using
# the CQL search feature as described here:
# http://tumblr.github.io/collins/recipes.html
- attributes_query = [ '='.join(attr_pair)
- for attr_pair in iteritems(attributes) ]
+ attributes_query = ['='.join(attr_pair) for attr_pair in iteritems(attributes)]
query_parameters = {
'details': ['True'],
'operation': [operation],
@@ -190,8 +190,7 @@ class CollinsInventory(object):
# Locates all assets matching the provided query, exhausting pagination.
while True:
if num_retries == self.collins_max_retries:
- raise MaxRetriesError("Maximum of %s retries reached; giving up" % \
- self.collins_max_retries)
+ raise MaxRetriesError("Maximum of %s retries reached; giving up" % self.collins_max_retries)
query_parameters['page'] = cur_page
query_url = "%s?%s" % (
(CollinsDefaults.ASSETS_API_ENDPOINT % self.collins_host),
@@ -212,8 +211,7 @@ class CollinsInventory(object):
cur_page += 1
num_retries = 0
except:
- self.log.error("Error while communicating with Collins, retrying:\n%s",
- traceback.format_exc())
+ self.log.error("Error while communicating with Collins, retrying:\n%s" % traceback.format_exc())
num_retries += 1
return assets
@@ -232,19 +230,15 @@ class CollinsInventory(object):
def read_settings(self):
""" Reads the settings from the collins.ini file """
- config_loc = os.getenv('COLLINS_CONFIG',
- os.path.dirname(os.path.realpath(__file__)) + '/collins.ini')
+ config_loc = os.getenv('COLLINS_CONFIG', os.path.dirname(os.path.realpath(__file__)) + '/collins.ini')
config = ConfigParser.SafeConfigParser()
config.read(os.path.dirname(os.path.realpath(__file__)) + '/collins.ini')
self.collins_host = config.get('collins', 'host')
- self.collins_username = os.getenv('COLLINS_USERNAME',
- config.get('collins', 'username'))
- self.collins_password = os.getenv('COLLINS_PASSWORD',
- config.get('collins', 'password'))
- self.collins_asset_type = os.getenv('COLLINS_ASSET_TYPE',
- config.get('collins', 'asset_type'))
+ self.collins_username = os.getenv('COLLINS_USERNAME', config.get('collins', 'username'))
+ self.collins_password = os.getenv('COLLINS_PASSWORD', config.get('collins', 'password'))
+ self.collins_asset_type = os.getenv('COLLINS_ASSET_TYPE', config.get('collins', 'asset_type'))
self.collins_timeout_secs = config.getint('collins', 'timeout_secs')
self.collins_max_retries = config.getint('collins', 'max_retries')
@@ -268,16 +262,12 @@ class CollinsInventory(object):
parser = argparse.ArgumentParser(
description='Produces an Ansible Inventory file based on Collins')
- parser.add_argument('--list',
- action='store_true', default=True, help='List instances (default: True)')
- parser.add_argument('--host',
- action='store', help='Get all the variables about a specific instance')
- parser.add_argument('--refresh-cache',
- action='store_true', default=False,
- help='Force refresh of cache by making API requests to Collins ' \
- '(default: False - use cache files)')
- parser.add_argument('--pretty',
- action='store_true', default=False, help='Pretty print all JSON output')
+ parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)')
+ parser.add_argument('--host', action='store', help='Get all the variables about a specific instance')
+ parser.add_argument('--refresh-cache', action='store_true', default=False,
+ help='Force refresh of cache by making API requests to Collins '
+ '(default: False - use cache files)')
+ parser.add_argument('--pretty', action='store_true', default=False, help='Pretty print all JSON output')
self.args = parser.parse_args()
def update_cache(self):
@@ -290,8 +280,7 @@ class CollinsInventory(object):
try:
server_assets = self.find_assets()
except:
- self.log.error("Error while locating assets from Collins:\n%s",
- traceback.format_exc())
+ self.log.error("Error while locating assets from Collins:\n%s" % traceback.format_exc())
return False
for asset in server_assets:
@@ -315,8 +304,7 @@ class CollinsInventory(object):
if self.prefer_hostnames and self._asset_has_attribute(asset, 'HOSTNAME'):
asset_identifier = self._asset_get_attribute(asset, 'HOSTNAME')
elif 'ADDRESSES' not in asset:
- self.log.warning("No IP addresses found for asset '%s', skipping",
- asset)
+ self.log.warning("No IP addresses found for asset '%s', skipping" % asset)
continue
elif len(asset['ADDRESSES']) < ip_index + 1:
self.log.warning(
@@ -384,11 +372,11 @@ class CollinsInventory(object):
# Need to load index from cache
self.load_cache_from_cache()
- if not self.args.host in self.cache:
+ if self.args.host not in self.cache:
# try updating the cache
self.update_cache()
- if not self.args.host in self.cache:
+ if self.args.host not in self.cache:
# host might not exist anymore
return self.json_format_dict({}, self.args.pretty)
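Several hunks above fold backslash-continued calls onto a single line: inside parentheses Python joins lines implicitly, so the trailing backslash was never required. A self-contained sketch of the same idea; the exception class and retry count below are stand-ins, not taken from the inventory script:

class MaxRetriesError(Exception):
    pass

max_retries = 3

def give_up():
    # Before:
    #   raise MaxRetriesError("Maximum of %s retries reached; giving up" % \
    #                         max_retries)
    raise MaxRetriesError("Maximum of %s retries reached; giving up" % max_retries)

try:
    give_up()
except MaxRetriesError as exc:
    print(exc)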


@@ -239,7 +239,6 @@ class ConsulInventory(object):
self.current_dc = datacenter
self.load_data_for_datacenter(datacenter)
def load_availability_groups(self, node, datacenter):
'''check the health of each service on a node and add add the node to either
an 'available' or 'unavailable' grouping. The suffix for each group can be
@@ -259,7 +258,6 @@ class ConsulInventory(object):
self.add_node_to_map(self.nodes_by_availability,
service_name + suffix, node['Node'])
def load_data_for_datacenter(self, datacenter):
'''processes all the nodes in a particular datacenter'''
index, nodes = self.consul_api.catalog.nodes(dc=datacenter)
@@ -380,7 +378,6 @@ class ConsulInventory(object):
def add_node_to_map(self, map, name, node):
self.push(map, name, self.get_inventory_name(node))
def push(self, my_dict, key, element):
''' Pushed an element onto an array that may not have been defined in the
dict '''
@@ -439,14 +436,13 @@ class ConsulConfig(dict):
def read_cli_args(self):
''' Command line argument processing '''
- parser = argparse.ArgumentParser(description=
- 'Produce an Ansible Inventory file based nodes in a Consul cluster')
+ parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based nodes in a Consul cluster')
parser.add_argument('--list', action='store_true',
help='Get all inventory variables from all nodes in the consul cluster')
parser.add_argument('--host', action='store',
- help='Get all inventory variables about a specific consul node, \
- requires datacenter set in consul.ini.')
+ help='Get all inventory variables about a specific consul node,'
+ 'requires datacenter set in consul.ini.')
parser.add_argument('--datacenter', action='store',
help='Get all inventory about a specific consul datacenter')
@@ -462,7 +458,6 @@ class ConsulConfig(dict):
return self.has_config(suffix)
return default
def get_consul_api(self):
'''get an instance of the api based on the supplied configuration'''
host = 'localhost'
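The help-text rewrite above leans on the fact that adjacent string literals are concatenated at compile time, so a long help string can be split across lines without a backslash continuation. A small runnable sketch; the parser and option are illustrative, not the script's full CLI:

import argparse

parser = argparse.ArgumentParser(description='Example inventory script')
parser.add_argument('--host', action='store',
                    help='Get all inventory variables about a specific consul node, '
                         'requires datacenter set in consul.ini.')
print(parser.parse_args(['--host', 'node1']).host)  # -> node1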


@@ -152,7 +152,6 @@ except ImportError as e:
sys.exit("failed=True msg='`dopy` library required for this script'")
class DigitalOceanInventory(object):
###########################################################################
@@ -240,7 +239,6 @@ or environment variables (DO_API_TOKEN)\n''')
print(json.dumps(json_data))
# That's all she wrote...
###########################################################################
# Script configuration
###########################################################################
@@ -276,7 +274,6 @@ or environment variables (DO_API_TOKEN)\n''')
if os.getenv("DO_API_KEY"):
self.api_token = os.getenv("DO_API_KEY")
def read_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')
@@ -315,7 +312,6 @@ or environment variables (DO_API_TOKEN)\n''')
not self.args.all and not self.args.host):
self.args.list = True
###########################################################################
# Data Management
###########################################################################
@@ -349,7 +345,6 @@ or environment variables (DO_API_TOKEN)\n''')
self.data['domains'] = self.manager.all_domains()
self.cache_refreshed = True
def build_inventory(self):
'''Build Ansible inventory of droplets'''
self.inventory = {
@@ -406,7 +401,6 @@ or environment variables (DO_API_TOKEN)\n''')
info = self.do_namespace(droplet)
self.inventory['_meta']['hostvars'][dest] = info
def load_droplet_variables_for_host(self):
'''Generate a JSON response to a --host call'''
host = int(self.args.host)
@@ -414,8 +408,6 @@ or environment variables (DO_API_TOKEN)\n''')
info = self.do_namespace(droplet)
return {'droplet': info}
###########################################################################
# Cache Management
###########################################################################
@@ -429,7 +421,6 @@ or environment variables (DO_API_TOKEN)\n''')
return True
return False
def load_from_cache(self):
''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
try:
@@ -443,7 +434,6 @@ or environment variables (DO_API_TOKEN)\n''')
self.data = data['data']
self.inventory = data['inventory']
def write_to_cache(self):
''' Writes data in JSON format to a file '''
data = {'data': self.data, 'inventory': self.inventory}
@@ -453,7 +443,6 @@ or environment variables (DO_API_TOKEN)\n''')
cache.write(json_data)
cache.close()
###########################################################################
# Utilities
###########################################################################
@@ -465,7 +454,6 @@ or environment variables (DO_API_TOKEN)\n''')
else:
my_dict[key] = [element]
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
return re.sub("[^A-Za-z0-9\-\.]", "_", word)
@@ -478,7 +466,6 @@ or environment variables (DO_API_TOKEN)\n''')
return info
###########################################################################
# Run the script
DigitalOceanInventory()


@@ -205,7 +205,6 @@ class Ec2Inventory(object):
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
@@ -218,7 +217,6 @@ class Ec2Inventory(object):
return False
def read_settings(self):
''' Reads the settings from the ec2.ini file '''
@@ -226,7 +224,8 @@ class Ec2Inventory(object):
scriptbasename = os.path.basename(scriptbasename)
scriptbasename = scriptbasename.replace('.py', '')
- defaults = {'ec2': {
+ defaults = {
+ 'ec2': {
'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename)
}
}
@@ -511,7 +510,6 @@ class Ec2Inventory(object):
help='Use boto profile for connections to EC2')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
''' Do API calls to each region, and save data in cache files '''
@@ -990,7 +988,6 @@ class Ec2Inventory(object):
self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
self.inventory["_meta"]["hostvars"][hostname]['ansible_ssh_host'] = dest
def add_rds_instance(self, instance, region):
''' Adds an RDS instance to the inventory and index, as long as it is
addressable '''
@@ -1070,7 +1067,6 @@ class Ec2Inventory(object):
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by engine
if self.group_by_rds_engine:
self.push(self.inventory, self.to_safe("rds_" + instance.engine), hostname)
@@ -1338,8 +1334,7 @@ class Ec2Inventory(object):
r53_conn = route53.Route53Connection()
all_zones = r53_conn.get_zones()
- route53_zones = [ zone for zone in all_zones if zone.name[:-1]
- not in self.route53_excluded_zones ]
+ route53_zones = [zone for zone in all_zones if zone.name[:-1] not in self.route53_excluded_zones]
self.route53_records = {}
@@ -1356,7 +1351,6 @@ class Ec2Inventory(object):
self.route53_records.setdefault(resource, set())
self.route53_records[resource].add(record_name)
def get_instance_route53_names(self, instance):
''' Check if an instance is referenced in the records we have from
Route53. If it is, return the list of domain names pointing to said
@@ -1523,10 +1517,10 @@ class Ec2Inventory(object):
# Need to load index from cache
self.load_index_from_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
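The Route 53 zone filter above is the usual list-comprehension cleanup: no spaces just inside the brackets and the expression joined onto one line once it fits within the length limit. A simplified, runnable version that uses plain strings in place of boto zone objects:

all_zones = ['example.com.', 'internal.example.com.']
excluded_zones = {'internal.example.com'}

# Before: [ zone for zone in all_zones if zone[:-1] not in excluded_zones ]
route53_zones = [zone for zone in all_zones if zone[:-1] not in excluded_zones]
print(route53_zones)  # -> ['example.com.']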


@@ -48,6 +48,7 @@ parser.add_option('--host', default=None, dest="host",
# helper functions
#
def get_ssh_config():
configs = []
for box in list_running_boxes():
@@ -55,6 +56,7 @@ def get_ssh_config():
configs.append(config)
return configs
# list all the running instances in the fleet
def list_running_boxes():
boxes = []
@@ -65,6 +67,7 @@ def list_running_boxes():
return boxes
def get_a_ssh_config(box_name):
config = {}
config['Host'] = box_name
@@ -72,6 +75,7 @@ def get_a_ssh_config(box_name):
config['ansible_python_interpreter'] = '/opt/bin/python'
return config
# List out servers that vagrant has running
# ------------------------------
if options.list:


@@ -46,6 +46,7 @@ if LooseVersion(requests.__version__) < LooseVersion('1.1.0'):
from requests.auth import HTTPBasicAuth
def json_format_dict(data, pretty=False):
"""Converts a dict to a JSON object and dumps it as a formatted string"""
@@ -54,6 +55,7 @@ def json_format_dict(data, pretty=False):
else:
return json.dumps(data)
class ForemanInventory(object):
def __init__(self):


@@ -340,7 +340,6 @@ class GceInventory(object):
help='Force refresh of cache by making API requests (default: False - use cache files)')
self.args = parser.parse_args()
def node_to_dict(self, inst):
md = {}


@@ -113,6 +113,7 @@ load_chube_config()
# Imports for ansible
import ConfigParser
class LinodeInventory(object):
def __init__(self):
"""Main execution path."""
@@ -245,10 +246,10 @@ class LinodeInventory(object):
# Need to load index from cache
self.load_index_from_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)


@@ -35,6 +35,7 @@ import sys
import lxc
import json
def build_dict():
"""Returns a dictionary keyed to the defined LXC groups. All
containers, including the ones not in any group, are included in the
@@ -53,6 +54,7 @@ def build_dict():
return dict([(g, {'hosts': [k for k, v in containers.items() if g in v],
'vars': {'ansible_connection': 'lxc'}}) for g in groups])
def main(argv):
"""Returns a JSON dictionary as expected by Ansible"""
result = build_dict()


@@ -32,6 +32,7 @@ try:
except ImportError:
import ConfigParser as configparser
class MDTInventory(object):
def __init__(self):
@@ -119,7 +120,6 @@ class MDTInventory(object):
if config.has_option('tower', 'groupname'):
self.mdt_groupname = config.get('tower', 'groupname')
def parse_cli_args(self):
'''
Command line argument processing


@@ -47,6 +47,7 @@ except ImportError:
print("Error: mk_livestatus is needed. Try something like: pip install python-mk-livestatus")
exit(1)
class NagiosLivestatusInventory(object):
def parse_ini_file(self):


@@ -42,6 +42,7 @@ except ImportError:
print("Error: SQLAlchemy is needed. Try something like: pip install sqlalchemy")
exit(1)
class NagiosNDOInventory(object):
def read_settings(self):


@@ -150,6 +150,7 @@ from click.exceptions import UsageError
from six import string_types
def warning(*objs):
print("WARNING: ", *objs, file=sys.stderr)


@@ -38,6 +38,7 @@ inventory = {'vzhosts': {'hosts': vzhosts}, '_meta': {'hostvars': {}}}
# default group, when description not defined
default_group = ['vzguest']
def get_guests():
# Loop through vzhosts
for h in vzhosts:


@@ -63,6 +63,7 @@ except ImportError:
ini_section = 'packet'
class PacketInventory(object):
def _empty_inventory(self):
@@ -101,7 +102,6 @@ class PacketInventory(object):
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
@@ -231,7 +231,6 @@ class PacketInventory(object):
help='Force refresh of cache by making API requests to Packet (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
''' Do API calls to each region, and save data in cache files '''
@@ -307,7 +306,6 @@ class PacketInventory(object):
if ip_address['public'] is True and ip_address['address_family'] == 4:
dest = ip_address['address']
if not dest:
# Skip devices we cannot address (e.g. private VPC subnet)
return
@@ -373,7 +371,6 @@ class PacketInventory(object):
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_device(device)
def get_host_info_dict_from_device(self, device):
device_vars = {}
for key in vars(device):
@@ -416,10 +413,10 @@ class PacketInventory(object):
# Need to load index from cache
self.load_index_from_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
- if not self.args.host in self.index:
+ if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
@@ -455,7 +452,6 @@ class PacketInventory(object):
json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
''' Reads the index from the cache file sets self.index '''
@@ -463,7 +459,6 @@ class PacketInventory(object):
json_index = cache.read()
self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
''' Writes data in JSON format to a file '''


@@ -38,10 +38,12 @@ from six import iteritems
from ansible.module_utils.urls import open_url
class ProxmoxNodeList(list):
def get_names(self):
return [node['node'] for node in self]
class ProxmoxVM(dict):
def get_variables(self):
variables = {}
@@ -49,6 +51,7 @@ class ProxmoxVM(dict):
variables['proxmox_' + key] = value
return variables
class ProxmoxVMList(list):
def __init__(self, data=[]):
for item in data:
@@ -68,14 +71,17 @@ class ProxmoxVMList(list):
return variables
class ProxmoxPoolList(list):
def get_names(self):
return [pool['poolid'] for pool in self]
class ProxmoxPool(dict):
def get_members_name(self):
return [member['name'] for member in self['members'] if member['template'] != 1]
class ProxmoxAPI(object):
def __init__(self, options):
self.options = options
@@ -139,6 +145,7 @@ class ProxmoxAPI(object):
def pool(self, poolid):
return ProxmoxPool(self.get('api2/json/pools/{}'.format(poolid)))
def main_list(options):
results = {
'all': {
@@ -199,6 +206,7 @@ def main_list(options):
return results
def main_host(options):
proxmox_api = ProxmoxAPI(options)
proxmox_api.auth()
@@ -211,6 +219,7 @@ def main_host(options):
return {}
def main():
parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME')
parser.add_option('--list', action="store_true", default=False, dest="list")
@@ -235,5 +244,6 @@ def main():
print(json.dumps(data, indent=indent))
if __name__ == '__main__':
main()
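The hunks in this block appear to add blank lines only, which matches PEP 8's rule (pycodestyle E302) of two blank lines before top-level class and def statements. A trivial, self-contained example of the expected spacing; the names are invented:

class NodeList(list):
    def get_names(self):
        return [node['node'] for node in self]


def main():
    print(NodeList([{'node': 'node1'}]).get_names())


if __name__ == '__main__':
    main()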


@@ -41,6 +41,7 @@ try:
except:
import simplejson as json
class SoftLayerInventory(object):
common_items = [
'id',


@@ -78,6 +78,7 @@ if not os.path.exists(CACHE_DIR):
# Helper functions
# ------------------------------
def spacewalk_report(name):
"""Yield a dictionary form of each CSV output produced by the specified
spacewalk-report


@@ -110,6 +110,7 @@ def stack_net_list(endpoint, header, client):
headers=header)
return json.loads(stack_r.json())
def format_meta(hostdata, intfdata, config):
use_hostnames = config['use_hostnames']
meta = dict(all=dict(hosts=list()),
@@ -159,7 +160,6 @@ def parse_args():
def main():
args = parse_args()
if StrictVersion(requests.__version__) < StrictVersion("2.4.3"):
sys.exit('requests>=2.4.3 is required for this inventory script')


@@ -23,6 +23,7 @@ try:
except ImportError:
import simplejson as json
class SetEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):


@@ -47,9 +47,11 @@ try:
from logging import NullHandler
except ImportError:
from logging import Handler
class NullHandler(Handler):
def emit(self, record):
pass
logging.getLogger('psphere').addHandler(NullHandler())
logging.getLogger('suds').addHandler(NullHandler())


@@ -54,6 +54,7 @@ except ImportError as e:
# Imports for ansible
import ConfigParser
class AzureInventory(object):
def __init__(self):
"""Main execution path."""
@@ -173,8 +174,7 @@ class AzureInventory(object):
parser.add_argument('--refresh-cache',
action='store_true', default=False,
help='Force refresh of thecache by making API requests to Azure '
- '(default: False - use cache files)',
- )
+ '(default: False - use cache files)')
parser.add_argument('--host', action='store',
help='Get all information about an instance.')
self.args = parser.parse_args()


@@ -49,6 +49,7 @@ try:
except:
import simplejson as json
class ZabbixInventory(object):
def read_settings(self):
@@ -96,7 +97,7 @@ class ZabbixInventory(object):
for group in host['groups']:
groupname = group['name']
- if not groupname in data:
+ if groupname not in data:
data[groupname] = self.hoststub()
data[groupname]['hosts'].append(hostname)


@@ -1,42 +1,6 @@
-contrib/inventory/abiquo.py
-contrib/inventory/apache-libcloud.py
-contrib/inventory/apstra_aos.py
-contrib/inventory/azure_rm.py
-contrib/inventory/cobbler.py
-contrib/inventory/collins.py
-contrib/inventory/consul_io.py
-contrib/inventory/digital_ocean.py
-contrib/inventory/docker.py
-contrib/inventory/ec2.py
-contrib/inventory/fleet.py
-contrib/inventory/foreman.py
contrib/inventory/freeipa.py
-contrib/inventory/gce.py
-contrib/inventory/jail.py
-contrib/inventory/libvirt_lxc.py
-contrib/inventory/linode.py
-contrib/inventory/lxc_inventory.py
-contrib/inventory/mdt_dynamic_inventory.py
-contrib/inventory/nagios_livestatus.py
-contrib/inventory/nagios_ndo.py
-contrib/inventory/nova.py
-contrib/inventory/nsot.py
-contrib/inventory/openvz.py
-contrib/inventory/ovirt.py
-contrib/inventory/packet_net.py
-contrib/inventory/proxmox.py
contrib/inventory/rackhd.py
-contrib/inventory/rax.py
-contrib/inventory/softlayer.py
-contrib/inventory/spacewalk.py
-contrib/inventory/ssh_config.py
-contrib/inventory/stacki.py
-contrib/inventory/vbox.py
-contrib/inventory/vmware.py
contrib/inventory/vmware_inventory.py
-contrib/inventory/windows_azure.py
-contrib/inventory/zabbix.py
-contrib/inventory/zone.py
docs/api/conf.py
docs/bin/dump_keywords.py
docs/bin/plugin_formatter.py