From 79b47bd0b7c069f4034fbf321cf16345e71de16a Mon Sep 17 00:00:00 2001
From: Jarrod Johnson
Date: Wed, 4 Oct 2017 16:27:40 -0400
Subject: [PATCH 1/8] Create nodediscover command

Provide a command to interact with discovery data in a more convenient
way. This commit covers most of the listing and filtering capability.
---
 confluent_client/bin/nodediscover | 115 ++++++++++++++++++++++++++++++
 1 file changed, 115 insertions(+)
 create mode 100755 confluent_client/bin/nodediscover

diff --git a/confluent_client/bin/nodediscover b/confluent_client/bin/nodediscover
new file mode 100755
index 00000000..fcc013e8
--- /dev/null
+++ b/confluent_client/bin/nodediscover
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2017 Lenovo
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import csv
+import optparse
+import os
+import sys
+
+path = os.path.dirname(os.path.realpath(__file__))
+path = os.path.realpath(os.path.join(path, '..', 'lib', 'python'))
+if path.startswith('/opt'):
+    sys.path.append(path)
+
+import confluent.client as client
+
+tabformat = '{0:>15}|{1:>15}|{2:>15}|{3:>36}|{4:>17}|{5:>12}|{6:>48}'
+columns = ['Node', 'Model', 'Serial', 'UUID', 'Mac Address', 'Type',
+           'Current IP Addresses']
+delimit = ['-' * 15, '-' * 15, '-' * 15, '-' * 36, '-' * 17, '-' * 12,
+           '-' * 48]
+
+
+def dumpmacs(procinfo):
+    return ','.join(procinfo['macs'])  # + procinfo.get('relatedmacs', []))
+
+
+def print_disco(options, session, currmac):
+    procinfo = {}
+    for tmpinfo in session.read('/discovery/by-mac/{0}'.format(currmac)):
+        procinfo.update(tmpinfo)
+    record = [procinfo['nodename'], procinfo['modelnumber'],
+              procinfo['serialnumber'], procinfo['uuid'], dumpmacs(procinfo),
+              ','.join(procinfo['types']),
+              ','.join(sorted(procinfo['ipaddrs']))]
+    if options.csv:
+        csv.writer(sys.stdout).writerow(record)
+    else:
+        print(tabformat.format(*record))
+
+
+def list_discovery(options, session):
+    if options.csv:
+        csv.writer(sys.stdout).writerow(columns)
+    else:
+        print(tabformat.format(*columns))
+        print(tabformat.format(*delimit))
+    path = '/discovery/'
+    if options.model:
+        path += 'by-model/{0}/'.format(options.model)
+    if options.serial:
+        path += 'by-serial/{0}/'.format(options.serial)
+    if options.uuid:
+        path += 'by-uuid/{0}/'.format(options.uuid)
+    if options.type:
+        path += 'by-type/{0}/'.format(options.type)
+    if options.mac:
+        #path += 'by-mac/{0}'.format(options.mac)
+        print_disco(options, session, options.mac)
+    else:
+        path += 'by-mac/'
+        macs = [x['item']['href'] for x in session.read(path)]
+        for currmac in macs:
+            print_disco(options, session, currmac)
+
+def main():
+    parser = optparse.OptionParser(
+        usage='Usage: %prog [list|assign] [options])')
+    parser.add_option('-m', '--model', dest='model',
+                      help='Operate with nodes matching the specified model '
+                           'number', metavar='MODEL')
+    parser.add_option('-s', '--serial', dest='serial',
+                      help='Operate against the system matching the specified '
+                           'serial number', metavar='SERIAL')
+    parser.add_option('-u', '--uuid', dest='uuid',
+                      help='Operate against the system matching the specified '
+                           'UUID', metavar='UUID')
+    parser.add_option('-n', '--netaddr', dest='mac',
+                      help='Operate against the system with the specified MAC '
+                           'address', metavar='MAC')
+    parser.add_option('-t', '--type', dest='type',
+                      help='Operate against the system of the specified type',
+                      metavar='TYPE')
+    parser.add_option('-c', '--csv', dest='csv',
+                      help='Use CSV formatted output', action='store_true')
+    parser.add_option('-i', '--import',
+                      help='Import bulk assignment data from given CSV file',
+                      metavar='IMPORT.CSV')
+    (options, args) = parser.parse_args()
+    if len(args) == 0 or args[0] not in ('list', 'assign'):
+        parser.print_help()
+        sys.exit(1)
+    session = client.Command()
+    if args[0] == 'list':
+        list_discovery(options, session)
+    if args[0] == 'assign':
+        assign_discovery(options)
+
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file

From 91ff08158ffab53af462d15ad06b52eb7dacaf11 Mon Sep 17 00:00:00 2001
From: Jarrod Johnson
Date: Thu, 5 Oct 2017 10:05:56 -0400
Subject: [PATCH 2/8] Add rescan subcommand to nodediscover

Allow nodediscover to initiate an active scan.
---
 confluent_client/bin/nodediscover | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/confluent_client/bin/nodediscover b/confluent_client/bin/nodediscover
index fcc013e8..6bca063c 100755
--- a/confluent_client/bin/nodediscover
+++ b/confluent_client/bin/nodediscover
@@ -79,7 +79,7 @@ def list_discovery(options, session):
 
 def main():
     parser = optparse.OptionParser(
-        usage='Usage: %prog [list|assign] [options])')
+        usage='Usage: %prog [list|assign|rescan] [options]')
     parser.add_option('-m', '--model', dest='model',
                       help='Operate with nodes matching the specified model '
                            'number', metavar='MODEL')
@@ -101,14 +101,17 @@ def main():
                       help='Import bulk assignment data from given CSV file',
                       metavar='IMPORT.CSV')
     (options, args) = parser.parse_args()
-    if len(args) == 0 or args[0] not in ('list', 'assign'):
+    if len(args) == 0 or args[0] not in ('list', 'assign', 'rescan'):
         parser.print_help()
         sys.exit(1)
     session = client.Command()
     if args[0] == 'list':
        list_discovery(options, session)
     if args[0] == 'assign':
-        assign_discovery(options)
+        assign_discovery(options, session)
+    if args[0] == 'rescan':
+        session.update('/discovery/rescan', {'rescan': 'start'})
+        print("Rescan initiated")
 
 
 if __name__ == '__main__':

From fdc4e959f741ac4761ccaeec830b4ef02bcc84cc Mon Sep 17 00:00:00 2001
From: Jarrod Johnson
Date: Thu, 5 Oct 2017 10:16:49 -0400
Subject: [PATCH 3/8] Populate nodename on pxe entries when possible

If a PXE-discovered entry is matched by UUID, populate the nodename just
as the serial number and model number are populated.
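
Roughly, the lookup behaves like the sketch below. This is only an
illustration with made-up sample data, not the code in this patch; the
real cache is the known_uuids map in confluent_server/confluent/discovery/core.py,
keyed by UUID, then by MAC, holding previously observed attributes.

    # illustrative sample only; real entries come from prior discovery
    known_uuids = {
        '00000000-0000-0000-0000-000000000001': {
            '40-f2-e9-00-00-01': {'serialnumber': 'SN0001',
                                  'modelnumber': 'MT0001',
                                  'nodename': 'n1'}}}

    def enrich_pxe_info(info):
        # copy the cached nodename onto the PXE record only when the
        # record does not already carry one, mirroring how the serial
        # and model numbers are filled in
        uuid = info.get('uuid', '')
        for mac in known_uuids.get(uuid, ()):
            cached = known_uuids[uuid][mac]
            if info.get('nodename') is None and 'nodename' in cached:
                info['nodename'] = cached['nodename']
        return info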
--- confluent_server/confluent/discovery/core.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/confluent_server/confluent/discovery/core.py b/confluent_server/confluent/discovery/core.py index fd69b28a..996818ff 100644 --- a/confluent_server/confluent/discovery/core.py +++ b/confluent_server/confluent/discovery/core.py @@ -150,6 +150,7 @@ pending_nodes = {} def enrich_pxe_info(info): sn = None mn = None + nodename = info.get('nodename', None) uuid = info.get('uuid', '') if not uuid_is_valid(uuid): return info @@ -158,6 +159,9 @@ def enrich_pxe_info(info): info['serialnumber'] = known_uuids[uuid][mac]['serialnumber'] if not mn and 'modelnumber' in known_uuids[uuid][mac]: info['modelnumber'] = known_uuids[uuid][mac]['modelnumber'] + if nodename is None and 'nodename' in known_uuids[uuid][mac]: + info['nodename'] = known_uuids[uuid][mac]['nodename'] + def uuid_is_valid(uuid): @@ -170,10 +174,10 @@ def uuid_is_valid(uuid): def send_discovery_datum(info): addresses = info.get('addresses', []) - yield msg.KeyValueData({'nodename': info.get('nodename', '')}) - yield msg.KeyValueData({'ipaddrs': [x[0] for x in addresses]}) if info['handler'] == pxeh: enrich_pxe_info(info) + yield msg.KeyValueData({'nodename': info.get('nodename', '')}) + yield msg.KeyValueData({'ipaddrs': [x[0] for x in addresses]}) sn = info.get('serialnumber', '') mn = info.get('modelnumber', '') uuid = info.get('uuid', '') From a8a32118db0f2efe61386c36a7388a42a652b762 Mon Sep 17 00:00:00 2001 From: Jarrod Johnson Date: Thu, 5 Oct 2017 13:52:20 -0400 Subject: [PATCH 4/8] Add the ability to assign Start with the ability to assign one at a time. --- confluent_client/bin/nodediscover | 42 ++++++++++++++++++++++++++----- 1 file changed, 36 insertions(+), 6 deletions(-) diff --git a/confluent_client/bin/nodediscover b/confluent_client/bin/nodediscover index 6bca063c..311e959a 100755 --- a/confluent_client/bin/nodediscover +++ b/confluent_client/bin/nodediscover @@ -59,6 +59,11 @@ def list_discovery(options, session): else: print(tabformat.format(*columns)) print(tabformat.format(*delimit)) + for mac in list_matching_macs(options, session): + print_disco(options, session, mac) + + +def list_matching_macs(options, session): path = '/discovery/' if options.model: path += 'by-model/{0}/'.format(options.model) @@ -69,17 +74,41 @@ def list_discovery(options, session): if options.type: path += 'by-type/{0}/'.format(options.type) if options.mac: - #path += 'by-mac/{0}'.format(options.mac) - print_disco(options, session, options.mac) + # path += 'by-mac/{0}'.format(options.mac) + return [options.mac.replace(':', '-')] else: path += 'by-mac/' - macs = [x['item']['href'] for x in session.read(path)] - for currmac in macs: - print_disco(options, session, currmac) + return [x['item']['href'] for x in session.read(path)] + +def assign_discovery(options, session): + abort = False + if not (options.serial or options.uuid or options.mac): + sys.stderr.write( + "UUID (-u), serial (-s), or ether address (-e) required for " + "assignment\n") + abort = True + if not options.node: + sys.stderr.write("Node (-n) must be specified for assignment\n") + abort = True + if abort: + sys.exit(1) + matches = list_matching_macs(options, session) + if not matches: + sys.stderr.write("No matching discovery candidates found\n") + sys.exit(1) + for res in session.update('/discovery/by-mac/{0}'.format(matches[0]), + {'node': options.node}): + if 'assigned' in res: + print('Assigned: {0}'.format(res['assigned'])) + else: + print(repr(res)) + + 
def main(): parser = optparse.OptionParser( usage='Usage: %prog [list|assign|rescan] [options]') + # -a for 'address' maybe? parser.add_option('-m', '--model', dest='model', help='Operate with nodes matching the specified model ' 'number', metavar='MODEL') @@ -89,7 +118,8 @@ def main(): parser.add_option('-u', '--uuid', dest='uuid', help='Operate against the system matching the specified ' 'UUID', metavar='UUID') - parser.add_option('-n', '--netaddr', dest='mac', + parser.add_option('-n', '--node', help='Operate with the given nodename') + parser.add_option('-e', '--ethaddr', dest='mac', help='Operate against the system with the specified MAC ' 'address', metavar='MAC') parser.add_option('-t', '--type', dest='type', From c891cff9260df05a1b4ca81de4a461c87993db83 Mon Sep 17 00:00:00 2001 From: Jarrod Johnson Date: Thu, 5 Oct 2017 16:55:11 -0400 Subject: [PATCH 5/8] Phase 1 of bulk assignment Parse CSV and do some validation, next phase will actually create nodes and assign the discovery. --- confluent_client/bin/nodediscover | 93 ++++++++++++++++++++++++++++++- 1 file changed, 91 insertions(+), 2 deletions(-) diff --git a/confluent_client/bin/nodediscover b/confluent_client/bin/nodediscover index 311e959a..1fd42122 100755 --- a/confluent_client/bin/nodediscover +++ b/confluent_client/bin/nodediscover @@ -53,6 +53,90 @@ def print_disco(options, session, currmac): print(tabformat.format(*record)) +def process_header(header): + # normalize likely header titles + fields = [] + broken = False + for datum in header: + datum = datum.lower() + if datum.startswith('node') or datum.startswith('name'): + fields.append('node') + elif datum in ('nodegroup', 'nodegroups', 'group', 'groups'): + fields.append('groups') + elif datum.startswith('mac') or datum.startswith('ether'): + fields.append('mac') + elif datum.startswith('serial') or datum in ('sn', 's/n'): + fields.append('serial') + elif datum == 'uuid': + fields.append('uuid') + elif datum in ('bmc', 'imm', 'xcc'): + fields.append('hardwaremanagement.manager') + elif datum in ('bmc gateway', 'xcc gateway', 'imm gateway'): + fields.append('net.bmc.gateway') + elif datum in ('bmcuser', 'username', 'user'): + fields.append('secret.hardwaremanagementuser') + elif datum in ('bmcpass', 'password', 'pass'): + fields.append('secret.hardwaremanagementpassword') + else: + print("Unrecognized column name {0}".format(datum)) + broken = True + if broken: + sys.exit(1) + return tuple(fields) + + +def datum_complete(datum): + if 'node' not in datum or not datum['node']: + sys.stderr.write('Nodename is a required field') + return False + provided = set(datum) + required = set(['serial', 'uuid', 'mac']) + for field in provided & required: + if datum[field]: + break + else: + sys.stderr.write('One of the fields "Serial Number", "UUID", or ' + '"MAC Address" must be provided') + return False + return True + + +searchkeys = set(['mac', 'serial', 'uuid']) + + +def search_record(datum, options, session): + for searchkey in searchkeys: + options.__dict__[searchkey] = None + for searchkey in searchkeys & set(datum): + options.__dict__[searchkey] = datum[searchkey] + return list(list_matching_macs(options, session)) + + + +def import_csv(options, session): + with open(options.importfile, 'r') as datasrc: + records = csv.reader(datasrc) + fields = process_header(next(records)) + nodedata = [] + for record in records: + currfields = list(fields) + nodedatum = {} + for datum in record: + nodedatum[currfields.pop(0)] = datum + if not datum_complete(nodedatum): + sys.exit(1) + if 
not search_record(nodedatum, options, session): + sys.stderr.write( + "Could not match the following data: " + + repr(nodedatum) + '\n') + sys.exit(1) + nodedata.append(nodedata) + # ok, we have vetted the csv and we can proceed, next we will do a create + # to make node definitions to hold if there isn't one already, fixing up + # fields like groups and bmc + # then will iterate through matches on each doing an assign once per + + def list_discovery(options, session): if options.csv: csv.writer(sys.stdout).writerow(columns) @@ -74,7 +158,10 @@ def list_matching_macs(options, session): if options.type: path += 'by-type/{0}/'.format(options.type) if options.mac: - # path += 'by-mac/{0}'.format(options.mac) + path += 'by-mac/{0}'.format(options.mac) + result = list(session.read(path))[0] + if 'error' in result: + return [] return [options.mac.replace(':', '-')] else: path += 'by-mac/' @@ -82,6 +169,8 @@ def list_matching_macs(options, session): def assign_discovery(options, session): abort = False + if options.importfile: + return import_csv(options, session) if not (options.serial or options.uuid or options.mac): sys.stderr.write( "UUID (-u), serial (-s), or ether address (-e) required for " @@ -127,7 +216,7 @@ def main(): metavar='TYPE') parser.add_option('-c', '--csv', dest='csv', help='Use CSV formatted output', action='store_true') - parser.add_option('-i', '--import', + parser.add_option('-i', '--import', dest='importfile', help='Import bulk assignment data from given CSV file', metavar='IMPORT.CSV') (options, args) = parser.parse_args() From e7c6dfab2ba05698d7edd9c256e943e6e685c081 Mon Sep 17 00:00:00 2001 From: Jarrod Johnson Date: Fri, 6 Oct 2017 14:02:49 -0400 Subject: [PATCH 6/8] Fix list by invalid type Rather than list everything, list nothing when asked to list an invalid type. 
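
The fix is a sentinel default, roughly as in the sketch below (simplified,
with a placeholder service name; the real mapping is servicebyname in
confluent_server/confluent/discovery/core.py):

    # hypothetical subset of the type-name -> service mapping
    servicebyname = {'lenovo-xcc': 'service:example-xcc-service'}

    def by_type_param(val):
        # an unknown type used to map to None, which effectively disabled
        # the filter and listed everything; the sentinel can never match a
        # real service name, so the listing comes back empty instead
        return servicebyname.get(val, '!!!!invalid-type')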
--- confluent_server/confluent/discovery/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/confluent_server/confluent/discovery/core.py b/confluent_server/confluent/discovery/core.py index 996818ff..ecb3bd79 100644 --- a/confluent_server/confluent/discovery/core.py +++ b/confluent_server/confluent/discovery/core.py @@ -329,7 +329,7 @@ def _parameterize_path(pathcomponents): if key not in validselectors: raise exc.NotFoundException('{0} is not valid here'.format(key)) if key == 'by-type': - keyparams[key] = servicebyname.get(val, None) + keyparams[key] = servicebyname.get(val, '!!!!invalid-type') else: keyparams[key] = val validselectors.discard(key) From 158a9705dba98efe68d96f57535e1be035cf6870 Mon Sep 17 00:00:00 2001 From: Jarrod Johnson Date: Fri, 6 Oct 2017 14:34:34 -0400 Subject: [PATCH 7/8] Support full assign from csv input --- confluent_client/bin/nodediscover | 45 ++++++++++++++++++++++++++----- 1 file changed, 39 insertions(+), 6 deletions(-) diff --git a/confluent_client/bin/nodediscover b/confluent_client/bin/nodediscover index 1fd42122..318699bc 100755 --- a/confluent_client/bin/nodediscover +++ b/confluent_client/bin/nodediscover @@ -112,12 +112,21 @@ def search_record(datum, options, session): return list(list_matching_macs(options, session)) +def datum_to_attrib(datum): + for key in ('serial', 'uuid', 'mac'): + try: + del datum[key] + except KeyError: + pass + datum['name'] = datum['node'] + del datum['node'] + return datum def import_csv(options, session): + nodedata = [] with open(options.importfile, 'r') as datasrc: records = csv.reader(datasrc) fields = process_header(next(records)) - nodedata = [] for record in records: currfields = list(fields) nodedatum = {} @@ -130,11 +139,29 @@ def import_csv(options, session): "Could not match the following data: " + repr(nodedatum) + '\n') sys.exit(1) - nodedata.append(nodedata) - # ok, we have vetted the csv and we can proceed, next we will do a create - # to make node definitions to hold if there isn't one already, fixing up - # fields like groups and bmc - # then will iterate through matches on each doing an assign once per + nodedata.append(nodedatum) + for datum in nodedata: + maclist = search_record(datum, options, session) + datum = datum_to_attrib(datum) + nodename = datum['name'] + for res in session.create('/nodes/', datum): + if 'error' in res: + sys.stderr.write(res['error'] + '\n') + continue + elif 'created' in res: + print('Defined ' + res['created']) + else: + print(repr(res)) + for mac in maclist: + for res in session.update('/discovery/by-mac/{0}'.format(mac), + {'node': nodename}): + if 'error' in res: + sys.stderr.write(res['error'] + '\n') + continue + elif 'assigned' in res: + print('Discovered ' + res['assigned']) + else: + print(repr(res)) def list_discovery(options, session): @@ -198,6 +225,12 @@ def main(): parser = optparse.OptionParser( usage='Usage: %prog [list|assign|rescan] [options]') # -a for 'address' maybe? + # order by + # show state (discovered or.. + # nodediscover approve? + # flush to clear old data out? (e.g. no good way to age pxe data) + # also delete discovery datum... 
more targeted
+    # defect: -t lenovo-imm returns all
     parser.add_option('-m', '--model', dest='model',
                       help='Operate with nodes matching the specified model '
                            'number', metavar='MODEL')

From 672c12762119f0a63f8b826faa5343252b324840 Mon Sep 17 00:00:00 2001
From: Jarrod Johnson
Date: Fri, 6 Oct 2017 16:10:47 -0400
Subject: [PATCH 8/8] Fix error propagation on manual discovery

Manual discovery needs to give the failure information to the user, not
just the log.
---
 confluent_server/confluent/discovery/core.py         | 2 ++
 confluent_server/confluent/discovery/handlers/bmc.py | 3 ++-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/confluent_server/confluent/discovery/core.py b/confluent_server/confluent/discovery/core.py
index ecb3bd79..11f2345c 100644
--- a/confluent_server/confluent/discovery/core.py
+++ b/confluent_server/confluent/discovery/core.py
@@ -757,6 +757,8 @@ def discover_node(cfg, handler, info, nodename, manual):
             handler.config(nodename)
         except Exception as e:
             info['discofailure'] = 'bug'
+            if manual:
+                raise
             log.log(
                 {'error':
                     'Error encountered trying to set up {0}, {1}'.format(
diff --git a/confluent_server/confluent/discovery/handlers/bmc.py b/confluent_server/confluent/discovery/handlers/bmc.py
index 7d6c67b5..4f26a88a 100644
--- a/confluent_server/confluent/discovery/handlers/bmc.py
+++ b/confluent_server/confluent/discovery/handlers/bmc.py
@@ -91,7 +91,8 @@ class NodeHandler(generic.NodeHandler):
         if ('secret.hardwaremanagementuser' not in cd or
                 'secret.hardwaremanagementpassword' not in cd):
             raise exc.TargetEndpointBadCredentials(
-                'Missing user and/or password')
+                'secret.hardwaremanagementuser and/or '
+                'secret.hardwaremanagementpassword was not configured')
         if ('hardwaremanagement.manager' in cd and
                 cd['hardwaremanagement.manager']['value'] and
                 not cd['hardwaremanagement.manager']['value'].startswith(