pep8 fixes for contrib (#24344)

This commit is contained in:
Matt Martz 2017-05-09 16:38:08 -05:00 committed by GitHub
commit d3249e7875
37 changed files with 326 additions and 380 deletions

View file

@ -53,6 +53,7 @@ except ImportError:
from ansible.module_utils.urls import open_url
def api_get(link, config):
try:
if link is None:
@ -67,6 +68,7 @@ def api_get(link, config):
except:
return None
def save_cache(data, config):
''' saves item to cache '''
dpath = config.get('cache', 'cache_dir')
@ -91,6 +93,7 @@ def get_cache(cache_item, config):
return inv
def cache_available(config):
''' checks if we have a 'fresh' cache available for item requested '''
@ -105,11 +108,12 @@ def cache_available(config):
if config.has_option('cache', 'cache_max_age'):
maxage = config.get('cache', 'cache_max_age')
if ((int(time.time()) - int(existing.st_mtime)) <= int(maxage)):
if (int(time.time()) - int(existing.st_mtime)) <= int(maxage):
return True
return False
def generate_inv_from_api(enterprise_entity, config):
try:
inventory['all'] = {}
@ -119,7 +123,7 @@ def generate_inv_from_api(enterprise_entity,config):
inventory['_meta']['hostvars'] = {}
enterprise = api_get(enterprise_entity, config)
vms_entity = next(link for link in (enterprise['links']) if (link['rel']=='virtualmachines'))
vms_entity = next(link for link in enterprise['links'] if link['rel'] == 'virtualmachines')
vms = api_get(vms_entity, config)
for vmcollection in vms['collection']:
for link in vmcollection['links']:
@ -133,7 +137,7 @@ def generate_inv_from_api(enterprise_entity,config):
# From abiquo.ini: Only adding to inventory VMs with public IP
if config.getboolean('defaults', 'public_ip_only') is True:
for link in vmcollection['links']:
if (link['type']=='application/vnd.abiquo.publicip+json' and link['rel']=='ip'):
if link['type'] == 'application/vnd.abiquo.publicip+json' and link['rel'] == 'ip':
vm_nic = link['title']
break
else:
@ -166,7 +170,7 @@ def generate_inv_from_api(enterprise_entity,config):
inventory[vm_template]['children'] = []
inventory[vm_template]['hosts'] = []
if config.getboolean('defaults', 'get_metadata') is True:
meta_entity = next(link for link in (vmcollection['links']) if (link['rel']=='metadata'))
meta_entity = next(link for link in vmcollection['links'] if link['rel'] == 'metadata')
try:
metadata = api_get(meta_entity, config)
if (config.getfloat("api", "version") >= 3.0):
@ -189,6 +193,7 @@ def generate_inv_from_api(enterprise_entity,config):
# Return empty hosts output
return {'all': {'hosts': []}, '_meta': {'hostvars': {}}}
def get_inventory(enterprise, config):
''' Reads the inventory from cache or Abiquo api '''
@ -202,6 +207,7 @@ def get_inventory(enterprise, config):
save_cache(inv, config)
return json.dumps(inv)
if __name__ == '__main__':
inventory = {}
enterprise = {}
@ -215,7 +221,7 @@ if __name__ == '__main__':
try:
login = api_get(None, config)
enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
enterprise = next(link for link in login['links'] if link['rel'] == 'enterprise')
except Exception as e:
enterprise = None

View file

@ -82,7 +82,6 @@ class LibcloudInventory(object):
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
@ -95,7 +94,6 @@ class LibcloudInventory(object):
return False
def read_settings(self):
''' Reads the settings from the libcloud.ini file '''
@ -146,7 +144,6 @@ class LibcloudInventory(object):
self.cache_path_index = cache_path + "/ansible-libcloud.index"
self.cache_max_age = config.getint('cache', 'cache_max_age')
def parse_cli_args(self):
'''
Command line argument processing
@ -161,7 +158,6 @@ class LibcloudInventory(object):
help='Force refresh of cache by making API requests to libcloud supported providers (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
'''
Do API calls to a location, and save data in cache files
@ -172,7 +168,6 @@ class LibcloudInventory(object):
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def get_nodes(self):
'''
Gets the list of all nodes
@ -181,7 +176,6 @@ class LibcloudInventory(object):
for node in self.conn.list_nodes():
self.add_node(node)
def get_node(self, node_id):
'''
Gets details about a specific node
@ -189,7 +183,6 @@ class LibcloudInventory(object):
return [node for node in self.conn.list_nodes() if node.id == node_id][0]
def add_node(self, node):
'''
Adds a node to the inventory and index, as long as it is
@ -244,10 +237,10 @@ class LibcloudInventory(object):
# Need to load index from cache
self.load_index_from_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
@ -289,7 +282,6 @@ class LibcloudInventory(object):
return self.json_format_dict(instance_vars, True)
def push(self, my_dict, key, element):
'''
Pushed an element onto an array that may not have been defined in
@ -301,7 +293,6 @@ class LibcloudInventory(object):
else:
my_dict[key] = [element]
def get_inventory_from_cache(self):
'''
Reads the inventory from the cache file and returns it as a JSON
@ -312,7 +303,6 @@ class LibcloudInventory(object):
json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
'''
Reads the index from the cache file sets self.index
@ -322,7 +312,6 @@ class LibcloudInventory(object):
json_index = cache.read()
self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
'''
Writes data in JSON format to a file
@ -333,7 +322,6 @@ class LibcloudInventory(object):
cache.write(json_data)
cache.close()
def to_safe(self, word):
'''
Converts 'bad' characters in a string to underscores so they can be
@ -342,7 +330,6 @@ class LibcloudInventory(object):
return re.sub("[^A-Za-z0-9\-]", "_", word)
def json_format_dict(self, data, pretty=False):
'''
Converts a dict to a JSON object and dumps it as a formatted
@ -354,6 +341,7 @@ class LibcloudInventory(object):
else:
return json.dumps(data)
def main():
LibcloudInventory()

View file

@ -278,10 +278,12 @@ Expected output format in Device mode
}
"""
def fail(msg):
sys.stderr.write("%s\n" % msg)
sys.exit(1)
class AosInventory(object):
def __init__(self):
@ -496,7 +498,6 @@ class AosInventory(object):
except:
pass
def parse_cli_args(self):
""" Command line argument processing """

View file

@ -229,11 +229,11 @@ class CobblerInventory(object):
# Need to load index from cache
self.load_cache_from_cache()
if not self.args.host in self.cache:
if self.args.host not in self.cache:
# try updating the cache
self.update_cache()
if not self.args.host in self.cache:
if self.args.host not in self.cache:
# host might not exist anymore
return self.json_format_dict({}, True)

View file

@ -85,6 +85,7 @@ from six import iteritems
from ansible.module_utils.urls import open_url
class CollinsDefaults(object):
ASSETS_API_ENDPOINT = '%s/api/assets'
SPECIAL_ATTRIBUTES = set([
@ -174,8 +175,7 @@ class CollinsInventory(object):
# Formats asset search query to locate assets matching attributes, using
# the CQL search feature as described here:
# http://tumblr.github.io/collins/recipes.html
attributes_query = [ '='.join(attr_pair)
for attr_pair in iteritems(attributes) ]
attributes_query = ['='.join(attr_pair) for attr_pair in iteritems(attributes)]
query_parameters = {
'details': ['True'],
'operation': [operation],
@ -190,8 +190,7 @@ class CollinsInventory(object):
# Locates all assets matching the provided query, exhausting pagination.
while True:
if num_retries == self.collins_max_retries:
raise MaxRetriesError("Maximum of %s retries reached; giving up" % \
self.collins_max_retries)
raise MaxRetriesError("Maximum of %s retries reached; giving up" % self.collins_max_retries)
query_parameters['page'] = cur_page
query_url = "%s?%s" % (
(CollinsDefaults.ASSETS_API_ENDPOINT % self.collins_host),
@ -212,8 +211,7 @@ class CollinsInventory(object):
cur_page += 1
num_retries = 0
except:
self.log.error("Error while communicating with Collins, retrying:\n%s",
traceback.format_exc())
self.log.error("Error while communicating with Collins, retrying:\n%s" % traceback.format_exc())
num_retries += 1
return assets
@ -232,19 +230,15 @@ class CollinsInventory(object):
def read_settings(self):
""" Reads the settings from the collins.ini file """
config_loc = os.getenv('COLLINS_CONFIG',
os.path.dirname(os.path.realpath(__file__)) + '/collins.ini')
config_loc = os.getenv('COLLINS_CONFIG', os.path.dirname(os.path.realpath(__file__)) + '/collins.ini')
config = ConfigParser.SafeConfigParser()
config.read(os.path.dirname(os.path.realpath(__file__)) + '/collins.ini')
self.collins_host = config.get('collins', 'host')
self.collins_username = os.getenv('COLLINS_USERNAME',
config.get('collins', 'username'))
self.collins_password = os.getenv('COLLINS_PASSWORD',
config.get('collins', 'password'))
self.collins_asset_type = os.getenv('COLLINS_ASSET_TYPE',
config.get('collins', 'asset_type'))
self.collins_username = os.getenv('COLLINS_USERNAME', config.get('collins', 'username'))
self.collins_password = os.getenv('COLLINS_PASSWORD', config.get('collins', 'password'))
self.collins_asset_type = os.getenv('COLLINS_ASSET_TYPE', config.get('collins', 'asset_type'))
self.collins_timeout_secs = config.getint('collins', 'timeout_secs')
self.collins_max_retries = config.getint('collins', 'max_retries')
@ -268,16 +262,12 @@ class CollinsInventory(object):
parser = argparse.ArgumentParser(
description='Produces an Ansible Inventory file based on Collins')
parser.add_argument('--list',
action='store_true', default=True, help='List instances (default: True)')
parser.add_argument('--host',
action='store', help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache',
action='store_true', default=False,
help='Force refresh of cache by making API requests to Collins ' \
parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)')
parser.add_argument('--host', action='store', help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to Collins '
'(default: False - use cache files)')
parser.add_argument('--pretty',
action='store_true', default=False, help='Pretty print all JSON output')
parser.add_argument('--pretty', action='store_true', default=False, help='Pretty print all JSON output')
self.args = parser.parse_args()
def update_cache(self):
@ -290,8 +280,7 @@ class CollinsInventory(object):
try:
server_assets = self.find_assets()
except:
self.log.error("Error while locating assets from Collins:\n%s",
traceback.format_exc())
self.log.error("Error while locating assets from Collins:\n%s" % traceback.format_exc())
return False
for asset in server_assets:
@ -315,8 +304,7 @@ class CollinsInventory(object):
if self.prefer_hostnames and self._asset_has_attribute(asset, 'HOSTNAME'):
asset_identifier = self._asset_get_attribute(asset, 'HOSTNAME')
elif 'ADDRESSES' not in asset:
self.log.warning("No IP addresses found for asset '%s', skipping",
asset)
self.log.warning("No IP addresses found for asset '%s', skipping" % asset)
continue
elif len(asset['ADDRESSES']) < ip_index + 1:
self.log.warning(
@ -384,11 +372,11 @@ class CollinsInventory(object):
# Need to load index from cache
self.load_cache_from_cache()
if not self.args.host in self.cache:
if self.args.host not in self.cache:
# try updating the cache
self.update_cache()
if not self.args.host in self.cache:
if self.args.host not in self.cache:
# host might not exist anymore
return self.json_format_dict({}, self.args.pretty)

View file

@ -239,7 +239,6 @@ class ConsulInventory(object):
self.current_dc = datacenter
self.load_data_for_datacenter(datacenter)
def load_availability_groups(self, node, datacenter):
'''check the health of each service on a node and add add the node to either
an 'available' or 'unavailable' grouping. The suffix for each group can be
@ -259,7 +258,6 @@ class ConsulInventory(object):
self.add_node_to_map(self.nodes_by_availability,
service_name + suffix, node['Node'])
def load_data_for_datacenter(self, datacenter):
'''processes all the nodes in a particular datacenter'''
index, nodes = self.consul_api.catalog.nodes(dc=datacenter)
@ -380,7 +378,6 @@ class ConsulInventory(object):
def add_node_to_map(self, map, name, node):
self.push(map, name, self.get_inventory_name(node))
def push(self, my_dict, key, element):
''' Pushed an element onto an array that may not have been defined in the
dict '''
@ -439,14 +436,13 @@ class ConsulConfig(dict):
def read_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description=
'Produce an Ansible Inventory file based nodes in a Consul cluster')
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based nodes in a Consul cluster')
parser.add_argument('--list', action='store_true',
help='Get all inventory variables from all nodes in the consul cluster')
parser.add_argument('--host', action='store',
help='Get all inventory variables about a specific consul node, \
requires datacenter set in consul.ini.')
help='Get all inventory variables about a specific consul node,'
'requires datacenter set in consul.ini.')
parser.add_argument('--datacenter', action='store',
help='Get all inventory about a specific consul datacenter')
@ -462,7 +458,6 @@ class ConsulConfig(dict):
return self.has_config(suffix)
return default
def get_consul_api(self):
'''get an instance of the api based on the supplied configuration'''
host = 'localhost'

View file

@ -152,7 +152,6 @@ except ImportError as e:
sys.exit("failed=True msg='`dopy` library required for this script'")
class DigitalOceanInventory(object):
###########################################################################
@ -240,7 +239,6 @@ or environment variables (DO_API_TOKEN)\n''')
print(json.dumps(json_data))
# That's all she wrote...
###########################################################################
# Script configuration
###########################################################################
@ -276,7 +274,6 @@ or environment variables (DO_API_TOKEN)\n''')
if os.getenv("DO_API_KEY"):
self.api_token = os.getenv("DO_API_KEY")
def read_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')
@ -315,7 +312,6 @@ or environment variables (DO_API_TOKEN)\n''')
not self.args.all and not self.args.host):
self.args.list = True
###########################################################################
# Data Management
###########################################################################
@ -349,7 +345,6 @@ or environment variables (DO_API_TOKEN)\n''')
self.data['domains'] = self.manager.all_domains()
self.cache_refreshed = True
def build_inventory(self):
'''Build Ansible inventory of droplets'''
self.inventory = {
@ -406,7 +401,6 @@ or environment variables (DO_API_TOKEN)\n''')
info = self.do_namespace(droplet)
self.inventory['_meta']['hostvars'][dest] = info
def load_droplet_variables_for_host(self):
'''Generate a JSON response to a --host call'''
host = int(self.args.host)
@ -414,8 +408,6 @@ or environment variables (DO_API_TOKEN)\n''')
info = self.do_namespace(droplet)
return {'droplet': info}
###########################################################################
# Cache Management
###########################################################################
@ -429,7 +421,6 @@ or environment variables (DO_API_TOKEN)\n''')
return True
return False
def load_from_cache(self):
''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
try:
@ -443,7 +434,6 @@ or environment variables (DO_API_TOKEN)\n''')
self.data = data['data']
self.inventory = data['inventory']
def write_to_cache(self):
''' Writes data in JSON format to a file '''
data = {'data': self.data, 'inventory': self.inventory}
@ -453,7 +443,6 @@ or environment variables (DO_API_TOKEN)\n''')
cache.write(json_data)
cache.close()
###########################################################################
# Utilities
###########################################################################
@ -465,7 +454,6 @@ or environment variables (DO_API_TOKEN)\n''')
else:
my_dict[key] = [element]
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
return re.sub("[^A-Za-z0-9\-\.]", "_", word)
@ -478,7 +466,6 @@ or environment variables (DO_API_TOKEN)\n''')
return info
###########################################################################
# Run the script
DigitalOceanInventory()

View file

@ -205,7 +205,6 @@ class Ec2Inventory(object):
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
@ -218,7 +217,6 @@ class Ec2Inventory(object):
return False
def read_settings(self):
''' Reads the settings from the ec2.ini file '''
@ -226,7 +224,8 @@ class Ec2Inventory(object):
scriptbasename = os.path.basename(scriptbasename)
scriptbasename = scriptbasename.replace('.py', '')
defaults = {'ec2': {
defaults = {
'ec2': {
'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename)
}
}
@ -511,7 +510,6 @@ class Ec2Inventory(object):
help='Use boto profile for connections to EC2')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
''' Do API calls to each region, and save data in cache files '''
@ -990,7 +988,6 @@ class Ec2Inventory(object):
self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
self.inventory["_meta"]["hostvars"][hostname]['ansible_ssh_host'] = dest
def add_rds_instance(self, instance, region):
''' Adds an RDS instance to the inventory and index, as long as it is
addressable '''
@ -1070,7 +1067,6 @@ class Ec2Inventory(object):
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by engine
if self.group_by_rds_engine:
self.push(self.inventory, self.to_safe("rds_" + instance.engine), hostname)
@ -1338,8 +1334,7 @@ class Ec2Inventory(object):
r53_conn = route53.Route53Connection()
all_zones = r53_conn.get_zones()
route53_zones = [ zone for zone in all_zones if zone.name[:-1]
not in self.route53_excluded_zones ]
route53_zones = [zone for zone in all_zones if zone.name[:-1] not in self.route53_excluded_zones]
self.route53_records = {}
@ -1356,7 +1351,6 @@ class Ec2Inventory(object):
self.route53_records.setdefault(resource, set())
self.route53_records[resource].add(record_name)
def get_instance_route53_names(self, instance):
''' Check if an instance is referenced in the records we have from
Route53. If it is, return the list of domain names pointing to said
@ -1523,10 +1517,10 @@ class Ec2Inventory(object):
# Need to load index from cache
self.load_index_from_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)

View file

@ -48,6 +48,7 @@ parser.add_option('--host', default=None, dest="host",
# helper functions
#
def get_ssh_config():
configs = []
for box in list_running_boxes():
@ -55,6 +56,7 @@ def get_ssh_config():
configs.append(config)
return configs
# list all the running instances in the fleet
def list_running_boxes():
boxes = []
@ -65,6 +67,7 @@ def list_running_boxes():
return boxes
def get_a_ssh_config(box_name):
config = {}
config['Host'] = box_name
@ -72,6 +75,7 @@ def get_a_ssh_config(box_name):
config['ansible_python_interpreter'] = '/opt/bin/python'
return config
# List out servers that vagrant has running
# ------------------------------
if options.list:

View file

@ -46,6 +46,7 @@ if LooseVersion(requests.__version__) < LooseVersion('1.1.0'):
from requests.auth import HTTPBasicAuth
def json_format_dict(data, pretty=False):
"""Converts a dict to a JSON object and dumps it as a formatted string"""
@ -54,6 +55,7 @@ def json_format_dict(data, pretty=False):
else:
return json.dumps(data)
class ForemanInventory(object):
def __init__(self):

View file

@ -340,7 +340,6 @@ class GceInventory(object):
help='Force refresh of cache by making API requests (default: False - use cache files)')
self.args = parser.parse_args()
def node_to_dict(self, inst):
md = {}

View file

@ -113,6 +113,7 @@ load_chube_config()
# Imports for ansible
import ConfigParser
class LinodeInventory(object):
def __init__(self):
"""Main execution path."""
@ -245,10 +246,10 @@ class LinodeInventory(object):
# Need to load index from cache
self.load_index_from_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)

View file

@ -35,6 +35,7 @@ import sys
import lxc
import json
def build_dict():
"""Returns a dictionary keyed to the defined LXC groups. All
containers, including the ones not in any group, are included in the
@ -53,6 +54,7 @@ def build_dict():
return dict([(g, {'hosts': [k for k, v in containers.items() if g in v],
'vars': {'ansible_connection': 'lxc'}}) for g in groups])
def main(argv):
"""Returns a JSON dictionary as expected by Ansible"""
result = build_dict()

View file

@ -32,6 +32,7 @@ try:
except ImportError:
import ConfigParser as configparser
class MDTInventory(object):
def __init__(self):
@ -119,7 +120,6 @@ class MDTInventory(object):
if config.has_option('tower', 'groupname'):
self.mdt_groupname = config.get('tower', 'groupname')
def parse_cli_args(self):
'''
Command line argument processing

View file

@ -47,6 +47,7 @@ except ImportError:
print("Error: mk_livestatus is needed. Try something like: pip install python-mk-livestatus")
exit(1)
class NagiosLivestatusInventory(object):
def parse_ini_file(self):

View file

@ -42,6 +42,7 @@ except ImportError:
print("Error: SQLAlchemy is needed. Try something like: pip install sqlalchemy")
exit(1)
class NagiosNDOInventory(object):
def read_settings(self):

View file

@ -150,6 +150,7 @@ from click.exceptions import UsageError
from six import string_types
def warning(*objs):
print("WARNING: ", *objs, file=sys.stderr)

View file

@ -38,6 +38,7 @@ inventory = {'vzhosts': {'hosts': vzhosts}, '_meta': {'hostvars': {}}}
# default group, when description not defined
default_group = ['vzguest']
def get_guests():
# Loop through vzhosts
for h in vzhosts:

View file

@ -63,6 +63,7 @@ except ImportError:
ini_section = 'packet'
class PacketInventory(object):
def _empty_inventory(self):
@ -101,7 +102,6 @@ class PacketInventory(object):
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
@ -231,7 +231,6 @@ class PacketInventory(object):
help='Force refresh of cache by making API requests to Packet (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
''' Do API calls to each region, and save data in cache files '''
@ -307,7 +306,6 @@ class PacketInventory(object):
if ip_address['public'] is True and ip_address['address_family'] == 4:
dest = ip_address['address']
if not dest:
# Skip devices we cannot address (e.g. private VPC subnet)
return
@ -373,7 +371,6 @@ class PacketInventory(object):
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_device(device)
def get_host_info_dict_from_device(self, device):
device_vars = {}
for key in vars(device):
@ -416,10 +413,10 @@ class PacketInventory(object):
# Need to load index from cache
self.load_index_from_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if not self.args.host in self.index:
if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
@ -455,7 +452,6 @@ class PacketInventory(object):
json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
''' Reads the index from the cache file sets self.index '''
@ -463,7 +459,6 @@ class PacketInventory(object):
json_index = cache.read()
self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
''' Writes data in JSON format to a file '''

View file

@ -38,10 +38,12 @@ from six import iteritems
from ansible.module_utils.urls import open_url
class ProxmoxNodeList(list):
def get_names(self):
return [node['node'] for node in self]
class ProxmoxVM(dict):
def get_variables(self):
variables = {}
@ -49,6 +51,7 @@ class ProxmoxVM(dict):
variables['proxmox_' + key] = value
return variables
class ProxmoxVMList(list):
def __init__(self, data=[]):
for item in data:
@ -68,14 +71,17 @@ class ProxmoxVMList(list):
return variables
class ProxmoxPoolList(list):
def get_names(self):
return [pool['poolid'] for pool in self]
class ProxmoxPool(dict):
def get_members_name(self):
return [member['name'] for member in self['members'] if member['template'] != 1]
class ProxmoxAPI(object):
def __init__(self, options):
self.options = options
@ -139,6 +145,7 @@ class ProxmoxAPI(object):
def pool(self, poolid):
return ProxmoxPool(self.get('api2/json/pools/{}'.format(poolid)))
def main_list(options):
results = {
'all': {
@ -199,6 +206,7 @@ def main_list(options):
return results
def main_host(options):
proxmox_api = ProxmoxAPI(options)
proxmox_api.auth()
@ -211,6 +219,7 @@ def main_host(options):
return {}
def main():
parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME')
parser.add_option('--list', action="store_true", default=False, dest="list")
@ -235,5 +244,6 @@ def main():
print(json.dumps(data, indent=indent))
if __name__ == '__main__':
main()

View file

@ -41,6 +41,7 @@ try:
except:
import simplejson as json
class SoftLayerInventory(object):
common_items = [
'id',

View file

@ -78,6 +78,7 @@ if not os.path.exists(CACHE_DIR):
# Helper functions
# ------------------------------
def spacewalk_report(name):
"""Yield a dictionary form of each CSV output produced by the specified
spacewalk-report

View file

@ -110,6 +110,7 @@ def stack_net_list(endpoint, header, client):
headers=header)
return json.loads(stack_r.json())
def format_meta(hostdata, intfdata, config):
use_hostnames = config['use_hostnames']
meta = dict(all=dict(hosts=list()),
@ -159,7 +160,6 @@ def parse_args():
def main():
args = parse_args()
if StrictVersion(requests.__version__) < StrictVersion("2.4.3"):
sys.exit('requests>=2.4.3 is required for this inventory script')

View file

@ -23,6 +23,7 @@ try:
except ImportError:
import simplejson as json
class SetEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):

View file

@ -47,9 +47,11 @@ try:
from logging import NullHandler
except ImportError:
from logging import Handler
class NullHandler(Handler):
def emit(self, record):
pass
logging.getLogger('psphere').addHandler(NullHandler())
logging.getLogger('suds').addHandler(NullHandler())

View file

@ -54,6 +54,7 @@ except ImportError as e:
# Imports for ansible
import ConfigParser
class AzureInventory(object):
def __init__(self):
"""Main execution path."""
@ -173,8 +174,7 @@ class AzureInventory(object):
parser.add_argument('--refresh-cache',
action='store_true', default=False,
help='Force refresh of the cache by making API requests to Azure '
'(default: False - use cache files)',
)
'(default: False - use cache files)')
parser.add_argument('--host', action='store',
help='Get all information about an instance.')
self.args = parser.parse_args()

View file

@ -49,6 +49,7 @@ try:
except:
import simplejson as json
class ZabbixInventory(object):
def read_settings(self):
@ -96,7 +97,7 @@ class ZabbixInventory(object):
for group in host['groups']:
groupname = group['name']
if not groupname in data:
if groupname not in data:
data[groupname] = self.hoststub()
data[groupname]['hosts'].append(hostname)

View file

@ -1,42 +1,6 @@
contrib/inventory/abiquo.py
contrib/inventory/apache-libcloud.py
contrib/inventory/apstra_aos.py
contrib/inventory/azure_rm.py
contrib/inventory/cobbler.py
contrib/inventory/collins.py
contrib/inventory/consul_io.py
contrib/inventory/digital_ocean.py
contrib/inventory/docker.py
contrib/inventory/ec2.py
contrib/inventory/fleet.py
contrib/inventory/foreman.py
contrib/inventory/freeipa.py
contrib/inventory/gce.py
contrib/inventory/jail.py
contrib/inventory/libvirt_lxc.py
contrib/inventory/linode.py
contrib/inventory/lxc_inventory.py
contrib/inventory/mdt_dynamic_inventory.py
contrib/inventory/nagios_livestatus.py
contrib/inventory/nagios_ndo.py
contrib/inventory/nova.py
contrib/inventory/nsot.py
contrib/inventory/openvz.py
contrib/inventory/ovirt.py
contrib/inventory/packet_net.py
contrib/inventory/proxmox.py
contrib/inventory/rackhd.py
contrib/inventory/rax.py
contrib/inventory/softlayer.py
contrib/inventory/spacewalk.py
contrib/inventory/ssh_config.py
contrib/inventory/stacki.py
contrib/inventory/vbox.py
contrib/inventory/vmware.py
contrib/inventory/vmware_inventory.py
contrib/inventory/windows_azure.py
contrib/inventory/zabbix.py
contrib/inventory/zone.py
docs/api/conf.py
docs/bin/dump_keywords.py
docs/bin/plugin_formatter.py