Mirror of https://github.com/xcat2/confluent.git

Merge branch 'master' into nodesearch

Commit 0fd4c3b2f7 by Jarrod Johnson, 2019-10-16 13:24:11 -04:00
83 changed files with 890 additions and 367 deletions

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corporation
@ -89,6 +89,10 @@ except NameError:
netserver = None
laststate = {}
try:
input = raw_input
except NameError:
pass
class BailOut(Exception):
def __init__(self, errorcode=0):
@ -195,9 +199,9 @@ def prompt():
if os.environ.get('TERM', '') not in ('linux'):
sys.stdout.write('\x1b]0;confetty: %s\x07' % target)
try:
return raw_input(target + ' -> ')
return input(target + ' -> ')
except KeyboardInterrupt:
print ""
print("")
return ""
except EOFError: # ctrl-d
print("exit")
@ -296,7 +300,7 @@ currchildren = None
def print_result(res):
if 'errorcode' in res or 'error' in res:
print res['error']
print(res['error'])
return
if 'databynode' in res:
print_result(res['databynode'])
@ -309,9 +313,9 @@ def print_result(res):
attrstr = '%s=%s' % (key, recurse_format(res[key]))
elif not isinstance(res[key], dict):
try:
print '{0}: {1}'.format(key, res[key])
print('{0}: {1}'.format(key, res[key]))
except UnicodeEncodeError:
print '{0}: {1}'.format(key, repr(res[key]))
print('{0}: {1}'.format(key, repr(res[key])))
continue
elif 'value' in res[key] and res[key]['value'] is not None:
attrstr = '%s="%s"' % (key, res[key]['value'])
@ -324,7 +328,7 @@ def print_result(res):
else:
sys.stdout.write('{0}: '.format(key))
if isinstance(res[key], str) or isinstance(res[key], unicode):
print res[key]
print(res[key])
else:
print_result(res[key])
continue
@ -423,10 +427,10 @@ def do_command(command, server):
for res in session.read(targpath):
if 'item' in res: # a link relation
if type(res['item']) == dict:
print res['item']["href"]
print(res['item']["href"])
else:
for item in res['item']:
print item["href"]
print(item["href"])
else: # generic attributes to list
if 'error' in res:
sys.stderr.write(res['error'] + '\n')
@ -851,7 +855,7 @@ def server_connect():
passphrase = os.environ['CONFLUENT_PASSPHRASE']
session.authenticate(username, passphrase)
while not session.authenticated:
username = raw_input("Name: ")
username = input("Name: ")
passphrase = getpass.getpass("Passphrase: ")
session.authenticate(username, passphrase)
@ -871,7 +875,7 @@ def main():
global inconsole
try:
server_connect()
except EOFError, KeyboardInterrupt:
except (EOFError, KeyboardInterrupt) as _:
raise BailOut(0)
except socket.gaierror:
sys.stderr.write('Could not connect to confluent\n')
@ -929,6 +933,7 @@ def main():
updatestatus(data)
continue
if data is not None:
data = client.stringify(data)
if clearpowermessage:
sys.stdout.write("\x1b[2J\x1b[;H")
clearpowermessage = False

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
import optparse
import signal
import sys

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
import csv
import optparse
import signal

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
import csv
import optparse
import signal

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo
@ -128,4 +128,4 @@ else:
else:
print(res['item']['href'].replace('/', ''))
sys.exit(exitcode)
sys.exit(exitcode)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo
@ -56,7 +56,7 @@ for node in session.read('/noderange/{0}/nodes/'.format(noderange)):
goodNodes = allNodes - errorNodes
for node in goodNodes:
print node + ": BMC Reset Successful"
print(node + ": BMC Reset Successful")
sys.exit(success)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo
@ -55,4 +55,4 @@ for r in session.create('/noderange/', attribs):
exitcode |= 1
if 'created' in r:
print('{0}: created'.format(r['created']))
sys.exit(exitcode)
sys.exit(exitcode)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2019 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2016-2017 Lenovo
@ -163,4 +163,4 @@ try:
update_firmware(session, upfile)
except KeyboardInterrupt:
print('')
sys.exit(exitcode)
sys.exit(exitcode)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo
@ -97,7 +97,7 @@ if len(args) > 1:
requestargs=args[1:]
except Exception as e:
print str(e)
print(str(e))
if exitcode != 0:
sys.exit(exitcode)
@ -123,6 +123,6 @@ else:
sys.stderr.write(res['error'] + '\n')
exitcode = 1
else:
print res['item']['href'].replace('/', '')
print(res['item']['href'].replace('/', ''))
sys.exit(exitcode)
sys.exit(exitcode)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo
@ -55,4 +55,4 @@ for r in session.create('/nodegroups/', attribs):
exitcode |= 1
if 'created' in r:
print('{0}: created'.format(r['created']))
sys.exit(exitcode)
sys.exit(exitcode)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo
@ -49,4 +49,4 @@ for r in session.delete('/nodegroups/{0}'.format(noderange)):
exitcode |= 1
if 'deleted' in r:
print('{0}: deleted'.format(r['deleted']))
sys.exit(exitcode)
sys.exit(exitcode)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2019 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2016-2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2019 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2018 Lenovo
@ -188,4 +188,4 @@ def main():
sys.exit(1)
handler(noderange, media)
if __name__ == '__main__':
main()
main()

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo
@ -72,4 +72,4 @@ if options.previous:
# add dictionary to session
session.add_precede_dict(prev)
sys.exit(session.simple_noderange_command(noderange, '/power/state', setstate))
sys.exit(session.simple_noderange_command(noderange, '/power/state', setstate))

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo
@ -49,4 +49,4 @@ for r in session.delete('/noderange/{0}'.format(noderange)):
exitcode |= 1
if 'deleted' in r:
print('{0}: deleted'.format(r['deleted']))
sys.exit(exitcode)
sys.exit(exitcode)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2019 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo
@ -56,7 +56,7 @@ for node in session.read('/noderange/{0}/nodes/'.format(noderange)):
goodNodes = allNodes - errorNodes
for node in goodNodes:
print node + ": Reseat successful"
print(node + ": Reseat successful")
sys.exit(success)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2016-2017 Lenovo
@ -55,7 +55,7 @@ def run():
noderange, targpath = args[-1].split(':', 1)
client.check_globbing(noderange)
c = client.Command()
cmdstr = " ".join(args[:-1])
cmdstr = ' '.join(args[:-1])
cmdstr = 'rsync -av --info=progress2 ' + cmdstr
cmdstr += ' {node}:' + targpath
@ -66,13 +66,15 @@ def run():
exitcode = 0
for exp in c.create('/noderange/{0}/attributes/expression'.format(noderange),
{'expression': cmdstr}):
{'expression': cmdstr}):
if 'error' in exp:
sys.stderr.write(exp['error'] + '\n')
exitcode |= exp.get('errorcode', 1)
ex = exp.get('databynode', ())
for node in ex:
cmd = ex[node]['value'].encode('utf-8')
cmd = ex[node]['value']
if not isinstance(cmd, bytes) and not isinstance(cmd, str):
cmd = cmd.encode('utf-8')
cmdv = shlex.split(cmd)
if currprocs < concurrentprocs:
currprocs += 1
@ -98,7 +100,7 @@ def run():
if desc['type'] == 'stdout':
if node not in pernodeout:
pernodeout[node] = ''
pernodeout[node] += data
pernodeout[node] += stringify(data)
if '\n' in pernodeout[node]:
currout, pernodeout[node] = pernodeout[node].split('\n', 1)
if currout:
@ -117,7 +119,7 @@ def run():
output.set_output(node, 'error!')
if node not in nodeerrs:
nodeerrs[node] = ''
nodeerrs[node] += data
nodeerrs[node] += client.stringify(data)
else:
pop = desc['popen']
ret = pop.poll()

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2016-2017 Lenovo
@ -71,7 +71,9 @@ def run():
exitcode |= exp.get('errorcode', 1)
ex = exp.get('databynode', ())
for node in ex:
cmd = ex[node]['value'].encode('utf-8')
cmd = ex[node]['value']
if not isinstance(cmd, bytes) and not isinstance(cmd, str):
cmd = cmd.encode('utf-8')
cmdv = shlex.split(cmd)
if currprocs < concurrentprocs:
currprocs += 1
@ -95,6 +97,7 @@ def run():
pernodeout[node] = []
pernodeout[node].append(data)
else:
data = client.stringify(data)
if options.nonodeprefix:
sys.stderr.write(data)
else:
@ -112,6 +115,7 @@ def run():
run_cmdv(node, cmdv, all, pipedesc)
for node in sortutil.natural_sort(pernodeout):
for line in pernodeout[node]:
line = client.stringify(line)
if options.nonodeprefix:
sys.stdout.write(line)
else:

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015-2017 Lenovo
@ -149,8 +149,11 @@ def sensorpass(showout=True, appendtime=False):
if appendtime:
showval += ' @' + time.strftime(
'%Y-%m-%dT%H:%M:%S')
print(u'{0}: {1}:{2}'.format(
node, sensedata['name'], showval).encode('utf8'))
printval = u'{0}: {1}:{2}'.format(
node, sensedata['name'], showval)
if not isinstance(printval, str):
printval = printval.encode('utf-8')
print(printval)
sys.stdout.flush()
return resultdata
@ -199,7 +202,11 @@ def main():
orderedsensors.append(name)
orderedsensors.sort()
for name in orderedsensors:
headernames.append(sensorheaders[name].encode('utf-8'))
headername = sensorheaders[name]
if (not isinstance(headername, str) and
not isinstance(headername, bytes)):
headername = headername.encode('utf-8')
headernames.append(headername)
if options.csv:
linebyline = False
csvwriter = csv.writer(sys.stdout)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2016-2017 Lenovo
@ -72,7 +72,9 @@ def run():
exitcode |= exp.get('errorcode', 1)
ex = exp.get('databynode', ())
for node in ex:
cmd = ex[node]['value'].encode('utf-8')
cmd = ex[node]['value']
if not isinstance(cmd, str) and not isinstance(cmd, bytes):
cmd = cmd.encode('utf-8')
cmdv = ['ssh', node, cmd]
if currprocs < concurrentprocs:
currprocs += 1
@ -96,6 +98,7 @@ def run():
pernodeout[node] = []
pernodeout[node].append(data)
else:
data = client.stringify(data)
if options.nonodeprefix:
sys.stderr.write(data)
else:
@ -113,6 +116,7 @@ def run():
run_cmdv(node, cmdv, all, pipedesc)
for node in sortutil.natural_sort(pernodeout):
for line in pernodeout[node]:
line = client.stringify(line)
if options.nonodeprefix:
sys.stdout.write(line)
else:

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2018 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2018 Lenovo
@ -144,4 +144,4 @@ def main():
sys.exit(1)
handler(noderange, media)
if __name__ == '__main__':
main()
main()

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2019 Lenovo

View File

@ -40,6 +40,15 @@ _attraliases = {
}
def stringify(instr):
# Normalize unicode and bytes to 'str', correcting for
# current python version
if isinstance(instr, bytes) and not isinstance(instr, str):
return instr.decode('utf-8')
elif not isinstance(instr, bytes) and not isinstance(instr, str):
return instr.encode('utf-8')
return instr
class Tabulator(object):
def __init__(self, headers):
self.headers = headers
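
The stringify helper added here to the client module is what the noderun, nodeshell, and nodersync hunks above call as client.stringify(data) before concatenating subprocess pipe output, which arrives as bytes on Python 3. A minimal sketch of that pattern with a local copy of the helper; the node name and echo invocation are illustrative, not the confluent call sites:

import subprocess

def stringify(instr):
    # Same normalization as the helper above: bytes decode to text on
    # Python 3, unicode encodes to str on Python 2, str passes through.
    if isinstance(instr, bytes) and not isinstance(instr, str):
        return instr.decode('utf-8')
    elif not isinstance(instr, bytes) and not isinstance(instr, str):
        return instr.encode('utf-8')
    return instr

pernodeout = {}
node = 'n1'
proc = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE)
data = proc.stdout.read()  # bytes under Python 3
pernodeout.setdefault(node, '')
pernodeout[node] += stringify(data)
print('{0}: {1}'.format(node, pernodeout[node].strip()))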

View File

@ -25,6 +25,11 @@ try:
except NameError:
unicode = str
try:
range = xrange
except NameError:
pass
def decodestr(value):
ret = None
try:
@ -40,7 +45,7 @@ def decodestr(value):
def unicode_dictvalues(dictdata):
for key in dictdata:
if isinstance(dictdata[key], str):
if isinstance(dictdata[key], bytes):
dictdata[key] = decodestr(dictdata[key])
elif isinstance(dictdata[key], datetime):
dictdata[key] = dictdata[key].strftime('%Y-%m-%dT%H:%M:%S')
@ -51,7 +56,7 @@ def unicode_dictvalues(dictdata):
def _unicode_list(currlist):
for i in xrange(len(currlist)):
for i in range(len(currlist)):
if isinstance(currlist[i], str):
currlist[i] = decodestr(currlist[i])
elif isinstance(currlist[i], dict):
@ -66,7 +71,7 @@ def send(handle, data):
data = data.encode('utf-8')
except AttributeError:
pass
if isinstance(data, str) or isinstance(data, unicode):
if isinstance(data, bytes) or isinstance(data, unicode):
# plain text, e.g. console data
tl = len(data)
if tl == 0:

View File

@ -24,10 +24,18 @@ a confluent server.
%setup -n %{name}-%{version} -n %{name}-%{version}
%build
python setup.py build
%if "%{dist}" == ".el8"
python3 setup.py build
%else
python2 setup.py build
%endif
%install
python setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES --install-scripts=/opt/confluent/bin --install-purelib=/opt/confluent/lib/python
%if "%{dist}" == ".el8"
python3 setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES --install-scripts=/opt/confluent/bin --install-purelib=/opt/confluent/lib/python
%else
python2 setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES --install-scripts=/opt/confluent/bin --install-purelib=/opt/confluent/lib/python
%endif
%clean

View File

@ -42,7 +42,7 @@ The attribute name may use a wildcard:
Commas can be used to indicate multiple nodes, and can mix and match any of the above primitives. The following can be a valid single noderange, combining any and all members of each comma separated component
`n1,n2,rack1,storage,location.rack=9,~s1..,n20-n30`
Exclusions can be done by prepending a - before a portion of a noderange:
Exclusions can be done by prepending a '-' before a portion of a noderange:
`rack1,-n2`
`compute,-rack1`
`compute,-location.row=12`
@ -54,7 +54,7 @@ To indicate nodes that match multiple selections at once (set intersection), the
For complex expressions, () may be used to indicate order of expanding the noderange to be explicit
`rack1,-(console.logging=full@compute)`
Noderange syntax can also indicate pagination, or separating the nodes into well defined chunks. > is used to indicate how many nodes to display at a time, and < is used to indicate how many nodes to skip into a noderange:
Noderange syntax can also indicate 'pagination', or separating the nodes into well defined chunks. > is used to indicate how many nodes to display at a time, and < is used to indicate how many nodes to skip into a noderange:
`rack1>3<6`
The above would show the seventh through ninth nodes of the rack1 group. Like all other noderange operations, this may be combined with any of the above, but must appear as the very last operation. Ordering is done with a natural sort.
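
As a worked example of the syntax above, a short sketch using the client API that appears elsewhere in this commit (client.Command() and session.read on /noderange/.../nodes/); the link-relation handling mirrors the nodelist hunks, but the noderange expression itself is illustrative:

import confluent.client as client

session = client.Command()   # same client entry point the CLI scripts above use
noderange = 'rack1,-n2'      # all of group rack1 except node n2
for res in session.read('/noderange/{0}/nodes/'.format(noderange)):
    if 'error' in res:
        print(res['error'])
        continue
    # each result is a link relation whose href names a member node
    print(res['item']['href'].replace('/', ''))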

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# This is a sample python script for going through all observed mac addresses
# and assuming they are BMC related and printing nodeattrib commands

View File

@ -22,10 +22,10 @@ This provides the modules common for both client and server
%setup -n %{name}-%{version} -n %{name}-%{version}
%build
python setup.py build
python2 setup.py build
%install
python setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES --install-purelib=/opt/confluent/lib/python --install-scripts=/opt/confluent/bin
python2 setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES --install-purelib=/opt/confluent/lib/python --install-scripts=/opt/confluent/bin
%clean
rm -rf $RPM_BUILD_ROOT

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
import argparse
import errno
@ -22,7 +22,7 @@ except NameError:
pass
def make_certificate():
umask = os.umask(0077)
umask = os.umask(0o77)
try:
os.makedirs('/etc/confluent/cfg')
except OSError as e:
@ -61,7 +61,7 @@ def join_collective(server, invitation):
make_certificate()
s = client.Command().connection
while not invitation:
invitation = raw_input('Paste the invitation here: ')
invitation = input('Paste the invitation here: ')
tlvdata.send(s, {'collective': {'operation': 'join',
'invitation': invitation,
'server': server}})
@ -69,6 +69,19 @@ def join_collective(server, invitation):
res = res.get('collective',
{'status': 'Unknown response: ' + repr(res)})
print(res.get('status', res.get('error', repr(res))))
if 'error' in res:
sys.exit(1)
def delete_member(name):
s = client.Command().connection
tlvdata.send(s, {'collective': {'operation': 'delete',
'member': name}})
res = tlvdata.recv(s)
res = res.get('collective',
{'status': 'Unknown response: ' + repr(res)})
print(res.get('status', res.get('error', repr(res))))
if 'error' in res:
sys.exit(1)
def show_collective():
@ -104,6 +117,8 @@ def main():
'collective member. Run collective invite -h for more information')
ic.add_argument('name', help='Name of server to invite to join the '
'collective')
dc = sp.add_parser('delete', help='Delete a member of a collective')
dc.add_argument('name', help='Name of server to delete from collective')
jc = sp.add_parser('join', help='Join a collective. Run collective join -h for more information')
jc.add_argument('server', help='Existing collective member that ran invite and generated a token')
jc.add_argument('-i', help='Invitation provided by runniing invite on an '
@ -117,6 +132,8 @@ def main():
join_collective(cmdset.server, cmdset.i)
elif cmdset.command == 'show':
show_collective()
elif cmdset.command == 'delete':
delete_member(cmdset.name)
if __name__ == '__main__':
main()

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corporation

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo
@ -82,7 +82,7 @@ elif args[0] == 'dump':
"or -s to do encrypted backup that requires keys.json from "
"another backup to restore.")
sys.exit(1)
os.umask(077)
os.umask(0o77)
main._initsecurity(conf.get_config())
if not os.path.exists(dumpdir):
os.makedirs(dumpdir)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corporation

View File

@ -28,8 +28,10 @@ import hashlib
import hmac
import multiprocessing
import confluent.userutil as userutil
import confluent.util as util
pam = None
try:
import PAM
import confluent.pam as pam
except ImportError:
pass
import time
@ -94,23 +96,6 @@ _deniedbyrole = {
]
}
}
class Credentials(object):
def __init__(self, username, passphrase):
self.username = username
self.passphrase = passphrase
self.haspam = False
def pam_conv(self, auth, query_list):
# use stored credentials in a pam conversation
self.haspam = True
resp = []
for query_entry in query_list:
query, pamtype = query_entry
if query.startswith('Password'):
resp.append((self.passphrase, 0))
else:
return None
return resp
def _prune_passcache():
@ -135,11 +120,13 @@ def _get_usertenant(name, tenant=False):
administrator account a tenant gets.
Otherwise, just assume a user in the default tenant
"""
if not isinstance(name, bytes):
name = name.encode('utf-8')
if not isinstance(tenant, bool):
# if not boolean, it must be explicit tenant
user = name
elif '/' in name: # tenant scoped name
tenant, user = name.split('/', 1)
elif b'/' in name: # tenant scoped name
tenant, user = name.split(b'/', 1)
elif configmanager.is_tenant(name):
# the account is the implicit tenant owner account
user = name
@ -147,6 +134,9 @@ def _get_usertenant(name, tenant=False):
else: # assume it is a non-tenant user account
user = name
tenant = None
user = util.stringify(user)
if tenant:
tenant = util.stringify(tenant)
yield user
yield tenant
@ -227,7 +217,6 @@ def check_user_passphrase(name, passphrase, operation=None, element=None, tenant
# would normally make an event and wait
# but here there's no need for that
eventlet.sleep(0.5)
credobj = Credentials(user, passphrase)
cfm = configmanager.ConfigManager(tenant, username=user)
ucfg = cfm.get_user(user)
if ucfg is None:
@ -278,22 +267,16 @@ def check_user_passphrase(name, passphrase, operation=None, element=None, tenant
if crypt == crypted:
_passcache[(user, tenant)] = hashlib.sha256(passphrase).digest()
return authorize(user, element, tenant, operation)
try:
pammy = PAM.pam()
pammy.start(_pamservice, user, credobj.pam_conv)
pammy.authenticate()
pammy.acct_mgmt()
if pam:
pammy = pam.pam()
usergood = pammy.authenticate(user, passphrase)
del pammy
_passcache[(user, tenant)] = hashlib.sha256(passphrase).digest()
return authorize(user, element, tenant, operation, skipuserobj=False)
except NameError:
pass
except PAM.error:
pass
if usergood:
_passcache[(user, tenant)] = hashlib.sha256(passphrase).digest()
return authorize(user, element, tenant, operation, skipuserobj=False)
eventlet.sleep(0.05) # stall even on test for existence of a username
return None
def _apply_pbkdf(passphrase, salt):
return KDF.PBKDF2(passphrase, salt, 32, 10000,
lambda p, s: hmac.new(p, s, hashlib.sha256).digest())
@ -311,4 +294,4 @@ def _do_pbkdf(passphrase, salt):
# compute. However, we do want to wait for result, so we have
# one of the exceedingly rare sort of circumstances where 'apply'
# actually makes sense
return authworkers.apply(_apply_pbkdf, [passphrase, salt])
return authworkers.apply(_apply_pbkdf, [passphrase, salt])
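
The auth change above drops the PyPAM conversation callback (the removed Credentials class) in favor of a module imported as confluent.pam with a python-pam style interface. A minimal sketch of the new check, assuming only the authenticate(user, passphrase) call used in the hunk:

try:
    import confluent.pam as pam
except ImportError:
    pam = None

def pam_check(user, passphrase):
    # True only when PAM is importable and accepts the credentials,
    # mirroring the 'if pam:' / 'if usergood:' flow above.
    if not pam:
        return False
    pammy = pam.pam()
    usergood = pammy.authenticate(user, passphrase)
    del pammy
    return usergood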

View File

@ -27,6 +27,7 @@ import eventlet.green.ssl as ssl
import eventlet.green.threading as threading
import greenlet
import random
import sys
try:
import OpenSSL.crypto as crypto
except ImportError:
@ -70,11 +71,22 @@ def connect_to_leader(cert=None, name=None, leader=None):
return False
with connecting:
with cfm._initlock:
tlvdata.recv(remote) # the banner
banner = tlvdata.recv(remote) # the banner
vers = banner.split()[2]
pvers = 0
reqver = 4
if vers == b'v0':
pvers = 2
elif vers == b'v1':
pvers = 4
if sys.version_info[0] < 3:
pvers = 2
reqver = 2
tlvdata.recv(remote) # authpassed... 0..
if name is None:
name = get_myname()
tlvdata.send(remote, {'collective': {'operation': 'connect',
'protover': reqver,
'name': name,
'txcount': cfm._txcount}})
keydata = tlvdata.recv(remote)
@ -119,7 +131,7 @@ def connect_to_leader(cert=None, name=None, leader=None):
globaldata = tlvdata.recv(remote)
dbi = tlvdata.recv(remote)
dbsize = dbi['dbsize']
dbjson = ''
dbjson = b''
while (len(dbjson) < dbsize):
ndata = remote.recv(dbsize - len(dbjson))
if not ndata:
@ -148,15 +160,15 @@ def connect_to_leader(cert=None, name=None, leader=None):
raise
currentleader = leader
#spawn this as a thread...
follower = eventlet.spawn(follow_leader, remote)
follower = eventlet.spawn(follow_leader, remote, pvers)
return True
def follow_leader(remote):
def follow_leader(remote, proto):
global currentleader
cleanexit = False
try:
cfm.follow_channel(remote)
cfm.follow_channel(remote, proto)
except greenlet.GreenletExit:
cleanexit = True
finally:
@ -208,8 +220,7 @@ def handle_connection(connection, cert, request, local=False):
else:
if not local:
return
if 'show' == operation:
if operation in ('show', 'delete'):
if not list(cfm.list_collective()):
tlvdata.send(connection,
{'collective': {'error': 'Collective mode not '
@ -246,7 +257,23 @@ def handle_connection(connection, cert, request, local=False):
collinfo['quorum'] = True
except exc.DegradedCollective:
collinfo['quorum'] = False
tlvdata.send(connection, {'collective': collinfo})
if operation == 'show':
tlvdata.send(connection, {'collective': collinfo})
elif operation == 'delete':
todelete = request['member']
if (todelete == collinfo['leader'] or
todelete in collinfo['active']):
tlvdata.send(connection, {'collective':
{'error': '{0} is still active, stop the confluent service to remove it'.format(todelete)}})
return
if todelete not in collinfo['offline']:
tlvdata.send(connection, {'collective':
{'error': '{0} is not a recognized collective member'.format(todelete)}})
return
cfm.del_collective_member(todelete)
tlvdata.send(connection,
{'collective': {'status': 'Successfully deleted {0}'.format(todelete)}})
connection.close()
return
if 'invite' == operation:
try:
@ -267,7 +294,8 @@ def handle_connection(connection, cert, request, local=False):
invitation = request['invitation']
try:
invitation = base64.b64decode(invitation)
name, invitation = invitation.split('@', 1)
name, invitation = invitation.split(b'@', 1)
name = util.stringify(name)
except Exception:
tlvdata.send(
connection,
@ -375,7 +403,7 @@ def handle_connection(connection, cert, request, local=False):
connection.close()
return
if (currentleader == connection.getpeername()[0] and
follower and follower.isAlive()):
follower and not follower.dead):
# if we are happily following this leader already, don't stir
# the pot
tlvdata.send(connection, {'status': 0})
@ -402,6 +430,7 @@ def handle_connection(connection, cert, request, local=False):
tlvdata.send(connection, collinfo)
if 'connect' == operation:
drone = request['name']
folver = request.get('protover', 2)
droneinfo = cfm.get_collective_member(drone)
if not (droneinfo and util.cert_matches(droneinfo['fingerprint'],
cert)):
@ -450,7 +479,7 @@ def handle_connection(connection, cert, request, local=False):
connection.sendall(cfgdata)
#tlvdata.send(connection, {'tenants': 0}) # skip the tenants for now,
# so far unused anyway
if not cfm.relay_slaved_requests(drone, connection):
if not cfm.relay_slaved_requests(drone, connection, folver):
if not retrythread: # start a recovery if everyone else seems
# to have disappeared
retrythread = eventlet.spawn_after(30 + random.random(),

View File

@ -16,7 +16,10 @@
# This defines config variable to store the global configuration for confluent
import ConfigParser
try:
import ConfigParser
except ModuleNotFoundError:
import configparser as ConfigParser
import os
_config = None

View File

@ -46,7 +46,10 @@ import Cryptodome.Protocol.KDF as KDF
from Cryptodome.Cipher import AES
from Cryptodome.Hash import HMAC
from Cryptodome.Hash import SHA256
import anydbm as dbm
try:
import anydbm as dbm
except ModuleNotFoundError:
import dbm
import ast
import base64
import confluent.config.attributes as allattributes
@ -57,7 +60,10 @@ import confluent.util
import confluent.netutil as netutil
import confluent.exceptions as exc
import copy
import cPickle
try:
import cPickle
except ModuleNotFoundError:
import pickle as cPickle
import errno
import eventlet
import eventlet.event as event
@ -74,6 +80,10 @@ import struct
import sys
import threading
import traceback
try:
unicode
except NameError:
unicode = str
_masterkey = None
@ -91,6 +101,10 @@ _cfgstore = None
_pendingchangesets = {}
_txcount = 0
_hasquorum = True
if sys.version_info[0] >= 3:
lowestver = 4
else:
lowestver = 2
_attraliases = {
'bmc': 'hardwaremanagement.manager',
@ -299,12 +313,12 @@ def check_quorum():
def exec_on_leader(function, *args):
if isinstance(cfgleader, bool):
raise exc.DegradedCollective()
xid = os.urandom(8)
xid = confluent.util.stringify(base64.b64encode(os.urandom(8)))
while xid in _pendingchangesets:
xid = os.urandom(8)
xid = confluent.util.stringify(base64.b64encode(os.urandom(8)))
_pendingchangesets[xid] = event.Event()
rpcpayload = cPickle.dumps({'function': function, 'args': args,
'xid': xid})
'xid': xid}, protocol=cfgproto)
rpclen = len(rpcpayload)
cfgleader.sendall(struct.pack('!Q', rpclen))
cfgleader.sendall(rpcpayload)
@ -314,15 +328,19 @@ def exec_on_leader(function, *args):
def exec_on_followers(fnname, *args):
global _txcount
if len(cfgstreams) < (len(_cfgstore['collective']) // 2):
# the leader counts in addition to registered streams
raise exc.DegradedCollective()
exec_on_followers_unconditional(fnname, *args)
def exec_on_followers_unconditional(fnname, *args):
global _txcount
pushes = eventlet.GreenPool()
_txcount += 1
payload = cPickle.dumps({'function': fnname, 'args': args,
'txcount': _txcount})
for res in pushes.starmap(
'txcount': _txcount}, protocol=lowestver)
for _ in pushes.starmap(
_push_rpc, [(cfgstreams[s], payload) for s in cfgstreams]):
pass
@ -391,12 +409,12 @@ def decrypt_value(cryptvalue,
raise Exception("bad HMAC value on crypted value")
decrypter = AES.new(key, AES.MODE_CBC, iv)
value = decrypter.decrypt(cipherdata)
padsize = ord(value[-1])
padsize = bytearray(value)[-1]
pad = value[-padsize:]
# Note that I cannot grasp what could be done with a subliminal
# channel in padding in this case, but check the padding anyway
for padbyte in pad:
if ord(padbyte) != padsize:
for padbyte in bytearray(pad):
if padbyte != padsize:
raise Exception("bad padding in encrypted value")
return value[0:-padsize]
else:
@ -462,11 +480,9 @@ def crypt_value(value,
key = _masterkey
iv = os.urandom(12)
crypter = AES.new(key, AES.MODE_GCM, nonce=iv)
try:
cryptval, hmac = crypter.encrypt_and_digest(value)
except TypeError:
cryptval, hmac = crypter.encrypt_and_digest(value.encode('utf-8'))
return iv, cryptval, hmac, '\x02'
value = confluent.util.stringify(value).encode('utf-8')
cryptval, hmac = crypter.encrypt_and_digest(value)
return iv, cryptval, hmac, b'\x02'
def _load_dict_from_dbm(dpath, tdb):
@ -474,16 +490,19 @@ def _load_dict_from_dbm(dpath, tdb):
dbe = dbm.open(tdb, 'r')
currdict = _cfgstore
for elem in dpath:
elem = confluent.util.stringify(elem)
if elem not in currdict:
currdict[elem] = {}
currdict = currdict[elem]
try:
for tk in dbe:
currdict[tk] = cPickle.loads(dbe[tk])
for tk in dbe.keys():
tks = confluent.util.stringify(tk)
currdict[tks] = cPickle.loads(dbe[tk])
except AttributeError:
tk = dbe.firstkey()
while tk != None:
currdict[tk] = cPickle.loads(dbe[tk])
tks = confluent.util.stringify(tk)
currdict[tks] = cPickle.loads(dbe[tk])
tk = dbe.nextkey(tk)
except dbm.error:
return
@ -522,13 +541,7 @@ def set_global(globalname, value, sync=True):
"""
if _cfgstore is None:
init(not sync)
try:
globalname = globalname.encode('utf-8')
except AttributeError:
# We have to remove the unicode-ness of the string,
# but if it is already bytes in python 3, then we will
# get an attributeerror, so pass
pass
globalname = confluent.util.stringify(globalname)
with _dirtylock:
if 'dirtyglobals' not in _cfgstore:
_cfgstore['dirtyglobals'] = set()
@ -541,9 +554,14 @@ def set_global(globalname, value, sync=True):
ConfigManager._bg_sync_to_file()
cfgstreams = {}
def relay_slaved_requests(name, listener):
def relay_slaved_requests(name, listener, vers):
global cfgleader
global _hasquorum
global lowestver
if vers > 2 and sys.version_info[0] < 3:
vers = 2
if vers < lowestver:
lowestver = vers
pushes = eventlet.GreenPool()
if name not in _followerlocks:
_followerlocks[name] = gthread.RLock()
@ -560,7 +578,7 @@ def relay_slaved_requests(name, listener):
lh = StreamHandler(listener)
_hasquorum = len(cfgstreams) >= (
len(_cfgstore['collective']) // 2)
payload = cPickle.dumps({'quorum': _hasquorum})
payload = cPickle.dumps({'quorum': _hasquorum}, protocol=lowestver)
for _ in pushes.starmap(
_push_rpc,
[(cfgstreams[s], payload) for s in cfgstreams]):
@ -573,7 +591,7 @@ def relay_slaved_requests(name, listener):
raise Exception("Unexpected loss of node in followers: " + name)
sz = struct.unpack('!Q', msg)[0]
if sz != 0:
rpc = ''
rpc = b''
while len(rpc) < sz:
nrpc = listener.recv(sz - len(rpc))
if not nrpc:
@ -587,7 +605,7 @@ def relay_slaved_requests(name, listener):
exc = e
if 'xid' in rpc:
_push_rpc(listener, cPickle.dumps({'xid': rpc['xid'],
'exc': exc}))
'exc': exc}, protocol=vers))
try:
msg = lh.get_next_msg()
except Exception:
@ -604,7 +622,7 @@ def relay_slaved_requests(name, listener):
if cfgstreams:
_hasquorum = len(cfgstreams) >= (
len(_cfgstore['collective']) // 2)
payload = cPickle.dumps({'quorum': _hasquorum})
payload = cPickle.dumps({'quorum': _hasquorum}, protocol=lowestver)
for _ in pushes.starmap(
_push_rpc,
[(cfgstreams[s], payload) for s in cfgstreams]):
@ -644,15 +662,19 @@ class StreamHandler(object):
self.sock = None
def stop_following(replacement=None):
def stop_following(replacement=None, proto=2):
with _leaderlock:
global cfgleader
global cfgproto
if cfgleader and not isinstance(cfgleader, bool):
try:
cfgleader.close()
except Exception:
pass
cfgleader = replacement
if proto > 2 and sys.version_info[0] < 3:
proto = 2
cfgproto = proto
def stop_leading():
for stream in list(cfgstreams):
@ -710,20 +732,21 @@ def commit_clear():
ConfigManager._bg_sync_to_file()
cfgleader = None
cfgproto = 2
def follow_channel(channel):
def follow_channel(channel, proto=2):
global _txcount
global _hasquorum
try:
stop_leading()
stop_following(channel)
stop_following(channel, proto)
lh = StreamHandler(channel)
msg = lh.get_next_msg()
while msg:
sz = struct.unpack('!Q', msg)[0]
if sz != 0:
rpc = ''
rpc = b''
while len(rpc) < sz:
nrpc = channel.recv(sz - len(rpc))
if not nrpc:
@ -761,6 +784,34 @@ def add_collective_member(name, address, fingerprint):
exec_on_followers('_true_add_collective_member', name, address, fingerprint)
_true_add_collective_member(name, address, fingerprint)
def del_collective_member(name):
if cfgleader and not isinstance(cfgleader, bool):
return exec_on_leader('del_collective_member', name)
if cfgstreams:
exec_on_followers_unconditional('_true_del_collective_member', name)
_true_del_collective_member(name)
def _true_del_collective_member(name, sync=True):
global cfgleader
name = confluent.util.stringify(name)
if _cfgstore is None:
return
if 'collective' not in _cfgstore:
return
if name not in _cfgstore['collective']:
return
del _cfgstore['collective'][name]
with _dirtylock:
if 'collectivedirty' not in _cfgstore:
_cfgstore['collectivedirty'] = set([])
_cfgstore['collectivedirty'].add(name)
if len(_cfgstore['collective']) < 2:
del _cfgstore['collective']
cfgleader = None
if sync:
ConfigManager._bg_sync_to_file()
_pending_collective_updates = {}
@ -785,10 +836,7 @@ def apply_pending_collective_updates():
def _true_add_collective_member(name, address, fingerprint, sync=True):
try:
name = name.encode('utf-8')
except AttributeError:
pass
name = confluent.util.stringify(name)
if _cfgstore is None:
init(not sync) # use not sync to avoid read from disk
if 'collective' not in _cfgstore:
@ -823,8 +871,7 @@ def get_collective_member_by_address(address):
def _mark_dirtykey(category, key, tenant=None):
if type(key) in (str, unicode):
key = key.encode('utf-8')
key = confluent.util.stringify(key)
with _dirtylock:
if 'dirtykeys' not in _cfgstore:
_cfgstore['dirtykeys'] = {}
@ -1145,9 +1192,9 @@ class ConfigManager(object):
Returns an identifier that can be used to unsubscribe from these
notifications using remove_watcher
"""
notifierid = random.randint(0, sys.maxint)
notifierid = random.randint(0, sys.maxsize)
while notifierid in self._notifierids:
notifierid = random.randint(0, sys.maxint)
notifierid = random.randint(0, sys.maxsize)
self._notifierids[notifierid] = {'attriblist': []}
if self.tenant not in self._attribwatchers:
self._attribwatchers[self.tenant] = {}
@ -1186,9 +1233,9 @@ class ConfigManager(object):
# use in case of cancellation.
# I anticipate no more than a handful of watchers of this sort, so
# this loop should not have to iterate too many times
notifierid = random.randint(0, sys.maxint)
notifierid = random.randint(0, sys.maxsize)
while notifierid in self._notifierids:
notifierid = random.randint(0, sys.maxint)
notifierid = random.randint(0, sys.maxsize)
# going to track that this is a nodecollection type watcher,
# but there is no additional data associated.
self._notifierids[notifierid] = set(['nodecollection'])
@ -1304,7 +1351,7 @@ class ConfigManager(object):
def _true_create_usergroup(self, groupname, role="Administrator"):
if 'usergroups' not in self._cfgstore:
self._cfgstore['usergroups'] = {}
groupname = groupname.encode('utf-8')
groupname = confluent.util.stringify(groupname)
if groupname in self._cfgstore['usergroups']:
raise Exception("Duplicate groupname requested")
self._cfgstore['usergroups'][groupname] = {'role': role}
@ -1406,7 +1453,7 @@ class ConfigManager(object):
raise Exception("Duplicate id requested")
if 'users' not in self._cfgstore:
self._cfgstore['users'] = {}
name = name.encode('utf-8')
name = confluent.util.stringify(name)
if name in self._cfgstore['users']:
raise Exception("Duplicate username requested")
self._cfgstore['users'][name] = {'id': uid}
@ -1664,7 +1711,7 @@ class ConfigManager(object):
"{0} node does not exist to add to {1}".format(
node, group))
for group in attribmap:
group = group.encode('utf-8')
group = confluent.util.stringify(group)
if group not in self._cfgstore['nodegroups']:
self._cfgstore['nodegroups'][group] = {'nodes': set()}
cfgobj = self._cfgstore['nodegroups'][group]
@ -1722,8 +1769,8 @@ class ConfigManager(object):
attributes = realattributes
if type(groups) in (str, unicode):
groups = (groups,)
for group in groups:
group = group.encode('utf-8')
for group in groups:
group = confluent.util.stringify(group)
try:
groupentry = self._cfgstore['nodegroups'][group]
except KeyError:
@ -1814,7 +1861,8 @@ class ConfigManager(object):
'nodeattrs': {node: [attrname]},
'callback': attribwatcher[watchkey][notifierid]
}
for watcher in notifdata.itervalues():
for watcher in notifdata:
watcher = notifdata[watcher]
callback = watcher['callback']
eventlet.spawn_n(_do_notifier, self, watcher, callback)
@ -1828,14 +1876,15 @@ class ConfigManager(object):
def _true_del_nodes(self, nodes):
if self.tenant in self._nodecollwatchers:
for watcher in self._nodecollwatchers[self.tenant].itervalues():
for watcher in self._nodecollwatchers[self.tenant]:
watcher = self._nodecollwatchers[self.tenant][watcher]
watcher(added=(), deleting=nodes, renamed=(), configmanager=self)
changeset = {}
for node in nodes:
# set a reserved attribute for the sake of the change notification
# framework to trigger on
changeset[node] = {'_nodedeleted': 1}
node = node.encode('utf-8')
node = confluent.util.stringify(node)
if node in self._cfgstore['nodes']:
self._sync_groups_to_node(node=node, groups=[],
changeset=changeset)
@ -1883,7 +1932,7 @@ class ConfigManager(object):
realattributes.append(attrname)
attributes = realattributes
for node in nodes:
node = node.encode('utf-8')
node = confluent.util.stringify(node)
try:
nodek = self._cfgstore['nodes'][node]
except KeyError:
@ -1954,7 +2003,8 @@ class ConfigManager(object):
self._recalculate_expressions(cfgobj, formatter=exprmgr, node=renamemap[name], changeset=changeset)
if self.tenant in self._nodecollwatchers:
nodecollwatchers = self._nodecollwatchers[self.tenant]
for watcher in nodecollwatchers.itervalues():
for watcher in nodecollwatchers:
watcher = nodecollwatchers[watcher]
eventlet.spawn_n(_do_add_watcher, watcher, (), self, renamemap)
self._bg_sync_to_file()
@ -2011,7 +2061,7 @@ class ConfigManager(object):
# first do a sanity check of the input upfront
# this mitigates risk of arguments being partially applied
for node in attribmap:
node = node.encode('utf-8')
node = confluent.util.stringify(node)
if node == '':
raise ValueError('"{0}" is not a valid node name'.format(node))
if autocreate:
@ -2066,8 +2116,8 @@ class ConfigManager(object):
attrname, node)
raise ValueError(errstr)
attribmap[node][attrname] = attrval
for node in attribmap:
node = node.encode('utf-8')
for node in attribmap:
node = confluent.util.stringify(node)
exprmgr = None
if node not in self._cfgstore['nodes']:
newnodes.append(node)
@ -2110,7 +2160,8 @@ class ConfigManager(object):
if newnodes:
if self.tenant in self._nodecollwatchers:
nodecollwatchers = self._nodecollwatchers[self.tenant]
for watcher in nodecollwatchers.itervalues():
for watcher in nodecollwatchers:
watcher = nodecollwatchers[watcher]
eventlet.spawn_n(_do_add_watcher, watcher, newnodes, self)
self._bg_sync_to_file()
#TODO: wait for synchronization to suceed/fail??)
@ -2228,7 +2279,7 @@ class ConfigManager(object):
target = dumpdata[confarea][element][attribute]['cryptvalue']
cryptval = []
for value in target:
cryptval.append(base64.b64encode(value))
cryptval.append(confluent.util.stringify(base64.b64encode(value)))
if attribute == 'cryptpass':
dumpdata[confarea][element][attribute] = '!'.join(cryptval)
else:
@ -2248,7 +2299,7 @@ class ConfigManager(object):
_cfgstore = {}
rootpath = cls._cfgdir
try:
with open(os.path.join(rootpath, 'transactioncount'), 'r') as f:
with open(os.path.join(rootpath, 'transactioncount'), 'rb') as f:
txbytes = f.read()
if len(txbytes) == 8:
_txcount = struct.unpack('!Q', txbytes)[0]
@ -2306,7 +2357,7 @@ class ConfigManager(object):
if statelessmode:
return
_mkpath(cls._cfgdir)
with open(os.path.join(cls._cfgdir, 'transactioncount'), 'w') as f:
with open(os.path.join(cls._cfgdir, 'transactioncount'), 'wb') as f:
f.write(struct.pack('!Q', _txcount))
if (fullsync or 'dirtyglobals' in _cfgstore and
'globals' in _cfgstore):
@ -2321,31 +2372,37 @@ class ConfigManager(object):
for globalkey in dirtyglobals:
if globalkey in _cfgstore['globals']:
globalf[globalkey] = \
cPickle.dumps(_cfgstore['globals'][globalkey])
cPickle.dumps(_cfgstore['globals'][globalkey], protocol=cPickle.HIGHEST_PROTOCOL)
else:
if globalkey in globalf:
del globalf[globalkey]
finally:
globalf.close()
if fullsync or 'collectivedirty' in _cfgstore:
collectivef = dbm.open(os.path.join(cls._cfgdir, "collective"),
'c', 384)
try:
if fullsync:
colls = _cfgstore['collective']
else:
with _dirtylock:
colls = copy.deepcopy(_cfgstore['collectivedirty'])
del _cfgstore['collectivedirty']
for coll in colls:
if coll in _cfgstore['collective']:
collectivef[coll] = cPickle.dumps(
_cfgstore['collective'][coll])
if len(_cfgstore.get('collective', ())) > 1:
collectivef = dbm.open(os.path.join(cls._cfgdir, "collective"),
'c', 384)
try:
if fullsync:
colls = _cfgstore['collective']
else:
if coll in collectivef:
del globalf[coll]
finally:
collectivef.close()
with _dirtylock:
colls = copy.deepcopy(_cfgstore['collectivedirty'])
del _cfgstore['collectivedirty']
for coll in colls:
if coll in _cfgstore['collective']:
collectivef[coll] = cPickle.dumps(
_cfgstore['collective'][coll], protocol=cPickle.HIGHEST_PROTOCOL)
else:
if coll in collectivef:
del collectivef[coll]
finally:
collectivef.close()
else:
try:
os.remove(os.path.join(cls._cfgdir, "collective"))
except OSError:
pass
if fullsync:
pathname = cls._cfgdir
currdict = _cfgstore['main']
@ -2354,7 +2411,7 @@ class ConfigManager(object):
dbf = dbm.open(os.path.join(pathname, category), 'c', 384) # 0600
try:
for ck in currdict[category]:
dbf[ck] = cPickle.dumps(currdict[category][ck])
dbf[ck] = cPickle.dumps(currdict[category][ck], protocol=cPickle.HIGHEST_PROTOCOL)
finally:
dbf.close()
elif 'dirtykeys' in _cfgstore:
@ -2378,7 +2435,7 @@ class ConfigManager(object):
if ck in dbf:
del dbf[ck]
else:
dbf[ck] = cPickle.dumps(currdict[category][ck])
dbf[ck] = cPickle.dumps(currdict[category][ck], protocol=cPickle.HIGHEST_PROTOCOL)
finally:
dbf.close()
willrun = False
@ -2417,7 +2474,9 @@ def _restore_keys(jsond, password, newpassword=None, sync=True):
else:
keydata = json.loads(jsond)
cryptkey = _parse_key(keydata['cryptkey'], password)
integritykey = _parse_key(keydata['integritykey'], password)
integritykey = None
if 'integritykey' in keydata:
integritykey = _parse_key(keydata['integritykey'], password)
conf.init_config()
cfg = conf.get_config()
if cfg.has_option('security', 'externalcfgkey'):
@ -2426,8 +2485,9 @@ def _restore_keys(jsond, password, newpassword=None, sync=True):
newpassword = keyfile.read()
set_global('master_privacy_key', _format_key(cryptkey,
password=newpassword), sync)
set_global('master_integrity_key', _format_key(integritykey,
password=newpassword), sync)
if integritykey:
set_global('master_integrity_key', _format_key(integritykey,
password=newpassword), sync)
_masterkey = cryptkey
_masterintegritykey = integritykey
if sync:
@ -2439,20 +2499,21 @@ def _dump_keys(password, dojson=True):
init_masterkey()
cryptkey = _format_key(_masterkey, password=password)
if 'passphraseprotected' in cryptkey:
cryptkey = '!'.join(map(base64.b64encode,
cryptkey['passphraseprotected']))
cryptkey = '!'.join(
[confluent.util.stringify(base64.b64encode(x))
for x in cryptkey['passphraseprotected']])
else:
cryptkey = '*unencrypted:{0}'.format(base64.b64encode(
cryptkey['unencryptedvalue']))
cryptkey = '*unencrypted:{0}'.format(confluent.util.stringify(base64.b64encode(
cryptkey['unencryptedvalue'])))
keydata = {'cryptkey': cryptkey}
if _masterintegritykey is not None:
integritykey = _format_key(_masterintegritykey, password=password)
if 'passphraseprotected' in integritykey:
integritykey = '!'.join(map(base64.b64encode,
integritykey['passphraseprotected']))
integritykey = '!'.join([confluent.util.stringify(base64.b64encode(x)) for x in
integritykey['passphraseprotected']])
else:
integritykey = '*unencrypted:{0}'.format(base64.b64encode(
integritykey['unencryptedvalue']))
integritykey = '*unencrypted:{0}'.format(confluent.util.stringify(base64.b64encode(
integritykey['unencryptedvalue'])))
keydata['integritykey'] = integritykey
if dojson:
return json.dumps(keydata, sort_keys=True, indent=4, separators=(',', ': '))
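
A recurring thread in the collective and configmanager hunks above is pickle protocol negotiation: the version token in a peer's banner (v0 or v1) maps to pickle protocol 2 or 4, the result is capped at 2 whenever the local interpreter is Python 2, and the leader serializes broadcasts with the lowest protocol any registered follower reported (lowestver). A minimal sketch of that logic with illustrative names; the banner contents below are assumed, only the split()[2] parsing is taken from the hunks:

import pickle
import sys

def protocol_for_banner(banner):
    # assumes the version token sits at index 2, per banner.split()[2] above
    vers = banner.split()[2]
    pvers = 2 if vers == b'v0' else 4 if vers == b'v1' else 0
    if sys.version_info[0] < 3:
        pvers = 2   # Python 2 pickle cannot emit protocol 4
    return pvers

class Broadcaster(object):
    def __init__(self):
        # matches the lowestver default: 4 on Python 3, 2 on Python 2
        self.lowestver = 4 if sys.version_info[0] >= 3 else 2

    def register_follower(self, protover):
        # remember the weakest protocol so every follower can decode broadcasts
        if protover < self.lowestver:
            self.lowestver = protover

    def broadcast_payload(self, payload):
        return pickle.dumps(payload, protocol=self.lowestver)

b = Broadcaster()
b.register_follower(protocol_for_banner(b'confluent -- v0 --'))
print(len(b.broadcast_payload({'quorum': True})))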

View File

@ -94,7 +94,7 @@ def _utf8_normalize(data, shiftin, decoder):
def pytechars2line(chars, maxlen=None):
line = '\x1b[m' # start at default params
line = b'\x1b[m' # start at default params
lb = False # last bold
li = False # last italic
lu = False # last underline
@ -106,7 +106,7 @@ def pytechars2line(chars, maxlen=None):
len = 1
for charidx in range(maxlen):
char = chars[charidx]
csi = []
csi = bytearray([])
if char.fg != lfg:
csi.append(30 + pytecolors2ansi[char.fg])
lfg = char.fg
@ -129,10 +129,13 @@ def pytechars2line(chars, maxlen=None):
lr = char.reverse
csi.append(7 if lr else 27)
if csi:
line += b'\x1b[' + b';'.join(['{0}'.format(x) for x in csi]) + b'm'
if not hasdata and char.data.encode('utf-8').rstrip():
line += b'\x1b[' + b';'.join(['{0}'.format(x).encode('utf-8') for x in csi]) + b'm'
if not hasdata and char.data.rstrip():
hasdata = True
line += char.data.encode('utf-8')
chardata = char.data
if not isinstance(chardata, bytes):
chardata = chardata.encode('utf-8')
line += chardata
if maxlen and len >= maxlen:
break
len += 1
@ -185,7 +188,7 @@ class ConsoleHandler(object):
if termstate & 1:
self.appmodedetected = True
if termstate & 2:
self.shiftin = '0'
self.shiftin = b'0'
self.users = {}
self._attribwatcher = None
self._console = None
@ -210,6 +213,8 @@ class ConsoleHandler(object):
return retrytime + (retrytime * random.random())
def feedbuffer(self, data):
if not isinstance(data, bytes):
data = data.encode('utf-8')
try:
self.termstream.feed(data)
except StopIteration: # corrupt parser state, start over
@ -448,6 +453,7 @@ class ConsoleHandler(object):
def _got_disconnected(self):
if self.connectstate != 'unconnected':
self._console.close()
self.connectstate = 'unconnected'
self.log(
logdata='console disconnected', ltype=log.DataTypes.event,
@ -525,17 +531,19 @@ class ConsoleHandler(object):
if data == conapi.ConsoleEvent.Disconnect:
self._got_disconnected()
return
elif data == '':
elif data in (b'', u''):
# ignore empty strings from a cconsole provider
return
if '\x1b[?1l' in data: # request for ansi mode cursor keys
if not isinstance(data, bytes):
data = data.encode('utf-8')
if b'\x1b[?1l' in data: # request for ansi mode cursor keys
self.appmodedetected = False
if '\x1b[?1h' in data: # remember the session wants the client to use
if b'\x1b[?1h' in data: # remember the session wants the client to use
# 'application mode' Thus far only observed on esxi
self.appmodedetected = True
if '\x1b)0' in data:
if b'\x1b)0' in data:
# console indicates it wants access to special drawing characters
self.shiftin = '0'
self.shiftin = b'0'
eventdata = 0
if self.appmodedetected:
eventdata |= 1
@ -588,25 +596,30 @@ class ConsoleHandler(object):
if pendingbl:
retdata += pendingbl
pendingbl = b''
retdata += nline + '\r\n'
retdata += nline + b'\r\n'
else:
pendingbl += nline + '\r\n'
pendingbl += nline + b'\r\n'
if len(retdata) > 6:
retdata = retdata[:-2] # remove the last \r\n
retdata += b'\x1b[{0};{1}H'.format(self.buffer.cursor.y + 1,
self.buffer.cursor.x + 1)
cursordata = '\x1b[{0};{1}H'.format(self.buffer.cursor.y + 1,
self.buffer.cursor.x + 1)
if not isinstance(cursordata, bytes):
cursordata = cursordata.encode('utf-8')
retdata += cursordata
if self.shiftin is not None: # detected that terminal requested a
# shiftin character set, relay that to the terminal that cannected
retdata += '\x1b)' + self.shiftin
retdata += b'\x1b)' + self.shiftin
if self.appmodedetected:
retdata += '\x1b[?1h'
retdata += b'\x1b[?1h'
else:
retdata += '\x1b[?1l'
retdata += b'\x1b[?1l'
return retdata, connstate
def write(self, data):
if self.connectstate == 'connected':
try:
if isinstance(data, str) and not isinstance(data, bytes):
data = data.encode('utf-8')
self._console.write(data)
except Exception:
_tracelog.log(traceback.format_exc(), ltype=log.DataTypes.event,

View File

@ -63,8 +63,10 @@ import itertools
import os
try:
import cPickle as pickle
pargs = {}
except ImportError:
import pickle
pargs = {'encoding': 'utf-8'}
import socket
import struct
import sys
@ -86,7 +88,10 @@ def seek_element(currplace, currkey):
def nested_lookup(nestdict, key):
try:
return reduce(seek_element, key, nestdict)
currloc = nestdict
for currk in key:
currloc = seek_element(currloc, currk)
return currloc
except TypeError:
raise exc.NotFoundException("Invalid element requested")
@ -106,6 +111,8 @@ def load_plugins():
for plugin in os.listdir(plugindir):
if plugin.startswith('.'):
continue
if '__pycache__' in plugin:
continue
(plugin, plugtype) = os.path.splitext(plugin)
if plugtype == '.sh':
pluginmap[plugin] = shellmodule.Plugin(
@ -398,6 +405,7 @@ def _init_core():
nodegroupresources = {
'attributes': {
'check': PluginRoute({'handler': 'attributes'}),
'rename': PluginRoute({'handler': 'attributes'}),
'all': PluginRoute({'handler': 'attributes'}),
'current': PluginRoute({'handler': 'attributes'}),
@ -444,7 +452,7 @@ def show_usergroup(groupname, configmanager):
def show_user(name, configmanager):
userobj = configmanager.get_user(name)
rv = {}
for attr in attrscheme.user.iterkeys():
for attr in attrscheme.user:
rv[attr] = None
if attr == 'password':
if 'cryptpass' in userobj:
@ -681,7 +689,10 @@ def handle_dispatch(connection, cert, dispatch, peername):
cfm.get_collective_member(peername)['fingerprint'], cert):
connection.close()
return
dispatch = pickle.loads(dispatch)
pversion = 0
if bytearray(dispatch)[0] == 0x80:
pversion = bytearray(dispatch)[1]
dispatch = pickle.loads(dispatch, **pargs)
configmanager = cfm.ConfigManager(dispatch['tenant'])
nodes = dispatch['nodes']
inputdata = dispatch['inputdata']
@ -717,18 +728,18 @@ def handle_dispatch(connection, cert, dispatch, peername):
configmanager=configmanager,
inputdata=inputdata))
for res in itertools.chain(*passvalues):
_forward_rsp(connection, res)
_forward_rsp(connection, res, pversion)
except Exception as res:
_forward_rsp(connection, res)
_forward_rsp(connection, res, pversion)
connection.sendall('\x00\x00\x00\x00\x00\x00\x00\x00')
def _forward_rsp(connection, res):
def _forward_rsp(connection, res, pversion):
try:
r = pickle.dumps(res)
r = pickle.dumps(res, protocol=pversion)
except TypeError:
r = pickle.dumps(Exception(
'Cannot serialize error, check collective.manager error logs for details' + str(res)))
'Cannot serialize error, check collective.manager error logs for details' + str(res)), protocol=pversion)
rlen = len(r)
if not rlen:
return
@ -957,12 +968,20 @@ def dispatch_request(nodes, manager, element, configmanager, inputdata,
if not util.cert_matches(a['fingerprint'], remote.getpeercert(
binary_form=True)):
raise Exception("Invalid certificate on peer")
tlvdata.recv(remote)
banner = tlvdata.recv(remote)
vers = banner.split()[2]
if vers == b'v0':
pvers = 2
elif vers == b'v1':
pvers = 4
if sys.version_info[0] < 3:
pvers = 2
tlvdata.recv(remote)
myname = collective.get_myname()
dreq = pickle.dumps({'name': myname, 'nodes': list(nodes),
'path': element,'tenant': configmanager.tenant,
'operation': operation, 'inputdata': inputdata})
'operation': operation, 'inputdata': inputdata},
protocol=pvers)
tlvdata.send(remote, {'dispatch': {'name': myname, 'length': len(dreq)}})
remote.sendall(dreq)
while True:
@ -1009,7 +1028,10 @@ def dispatch_request(nodes, manager, element, configmanager, inputdata,
a['name']))
return
rsp += nrsp
rsp = pickle.loads(rsp)
try:
rsp = pickle.loads(rsp, **pargs)
except UnicodeDecodeError:
rsp = pickle.loads(rsp, encoding='latin1')
if isinstance(rsp, Exception):
raise rsp
yield rsp

View File

@ -705,7 +705,8 @@ def detected(info):
def b64tohex(b64str):
bd = base64.b64decode(b64str)
return ''.join(['{0:02x}'.format(ord(x)) for x in bd])
bd = bytearray(bd)
return ''.join(['{0:02x}'.format(x) for x in bd])
def get_enclosure_chain_head(nodename, cfg):
@ -1209,6 +1210,7 @@ def rescan():
def start_detection():
global attribwatcher
global rechecker
global rechecktime
_map_unique_ids()
cfg = cfm.ConfigManager(None)
allnodes = cfg.list_nodes()

View File

@ -12,9 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import confluent.discovery.handlers.bmc as bmchandler
import pyghmi.exceptions as pygexc
import pyghmi.ipmi.private.util as pygutil
import confluent.util as util
import struct
class NodeHandler(bmchandler.NodeHandler):
@ -37,8 +39,9 @@ class NodeHandler(bmchandler.NodeHandler):
if wronguuid:
# we need to fix the first three portions of the uuid
uuidprefix = wronguuid.split('-')[:3]
uuidprefix = struct.pack(
'<IHH', *[int(x, 16) for x in uuidprefix]).encode('hex')
uuidprefix = codecs.encode(struct.pack(
'<IHH', *[int(x, 16) for x in uuidprefix]), 'hex')
uuidprefix = util.stringify(uuidprefix)
uuidprefix = uuidprefix[:8] + '-' + uuidprefix[8:12] + '-' + \
uuidprefix[12:16]
self.info['uuid'] = uuidprefix + '-' + '-'.join(

View File

@ -12,14 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import confluent.discovery.handlers.bmc as bmchandler
import confluent.exceptions as exc
import eventlet
webclient = eventlet.import_patched('pyghmi.util.webclient')
import struct
import urllib
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
import eventlet.support.greendns
import confluent.netutil as netutil
import confluent.util as util
getaddrinfo = eventlet.support.greendns.getaddrinfo
from xml.etree.ElementTree import fromstring
@ -27,8 +32,9 @@ from xml.etree.ElementTree import fromstring
def fixuuid(baduuid):
# SMM dumps it out in hex
uuidprefix = (baduuid[:8], baduuid[8:12], baduuid[12:16])
a = struct.pack('<IHH', *[int(x, 16) for x in uuidprefix]).encode(
a = codecs.encode(struct.pack('<IHH', *[int(x, 16) for x in uuidprefix]),
'hex')
a = util.stringify(a)
uuid = (a[:8], a[8:12], a[12:16], baduuid[16:20], baduuid[20:])
return '-'.join(uuid).lower()
@ -110,16 +116,16 @@ class NodeHandler(bmchandler.NodeHandler):
'password': 'PASSW0RD',
}
headers = {'Connection': 'keep-alive', 'Content-Type': 'application/x-www-form-urlencoded'}
wc.request('POST', '/data/login', urllib.urlencode(authdata), headers)
wc.request('POST', '/data/login', urlencode(authdata), headers)
rsp = wc.getresponse()
rspdata = rsp.read()
rspdata = util.stringify(rsp.read())
if 'authResult>0' not in rspdata:
# default credentials are refused, try with the actual
authdata['user'] = username
authdata['password'] = password
wc.request('POST', '/data/login', urllib.urlencode(authdata), headers)
wc.request('POST', '/data/login', urlencode(authdata), headers)
rsp = wc.getresponse()
rspdata = rsp.read()
rspdata = util.stringify(rsp.read())
if 'renew_account' in rspdata:
raise Exception('Configured password has expired')
if 'authResult>0' not in rspdata:
@ -133,11 +139,11 @@ class NodeHandler(bmchandler.NodeHandler):
tokens = fromstring(rspdata)
st2 = tokens.findall('st2')[0].text
wc.set_header('ST2', st2)
wc.request('POST', '/data/changepwd', urllib.urlencode(passwdchange))
wc.request('POST', '/data/changepwd', urlencode(passwdchange))
rsp = wc.getresponse()
rspdata = rsp.read()
authdata['password'] = password
wc.request('POST', '/data/login', urllib.urlencode(authdata), headers)
wc.request('POST', '/data/login', urlencode(authdata), headers)
rsp = wc.getresponse()
rspdata = rsp.read()
if 'authResult>0' in rspdata:
@ -153,7 +159,7 @@ class NodeHandler(bmchandler.NodeHandler):
rsp = wc.getresponse()
rspdata = rsp.read()
authdata['user'] = username
wc.request('POST', '/data/login', urllib.urlencode(authdata, headers))
wc.request('POST', '/data/login', urlencode(authdata, headers))
rsp = wc.getresponse()
rspdata = rsp.read()
tokens = fromstring(rspdata)

View File

@ -18,12 +18,16 @@ import confluent.netutil as netutil
import confluent.util as util
import eventlet.support.greendns
import json
import urllib
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
getaddrinfo = eventlet.support.greendns.getaddrinfo
webclient = eventlet.import_patched('pyghmi.util.webclient')
class NodeHandler(generic.NodeHandler):
devname = 'TSM'
DEFAULT_USER = 'USERID'
DEFAULT_PASS = 'PASSW0RD'
@ -52,8 +56,9 @@ class NodeHandler(generic.NodeHandler):
}
if not self.trieddefault:
wc = webclient.SecureHTTPConnection(self.ipaddr, 443, verifycallback=self.validate_cert)
rsp, status = wc.grab_json_response_with_status('/api/session', urllib.urlencode(authdata))
rsp, status = wc.grab_json_response_with_status('/api/session', urlencode(authdata))
if status > 400:
rsp = util.stringify(rsp)
self.trieddefault = True
if '555' in rsp:
passchange = {
@ -63,9 +68,9 @@ class NodeHandler(generic.NodeHandler):
'default_password': self.DEFAULT_PASS,
'username': self.DEFAULT_USER
}
rsp, status = wc.grab_json_response_with_status('/api/reset-pass', urllib.urlencode(passchange))
rsp, status = wc.grab_json_response_with_status('/api/reset-pass', urlencode(passchange))
authdata['password'] = self.targpass
rsp, status = wc.grab_json_response_with_status('/api/session', urllib.urlencode(authdata))
rsp, status = wc.grab_json_response_with_status('/api/session', urlencode(authdata))
self.csrftok = rsp['CSRFToken']
self.channel = rsp['channel']
self.curruser = self.DEFAULT_USER
@ -80,7 +85,7 @@ class NodeHandler(generic.NodeHandler):
if self.curruser:
authdata['username'] = self.curruser
authdata['password'] = self.currpass
rsp, status = wc.grab_json_response_with_status('/api/session', urllib.urlencode(authdata))
rsp, status = wc.grab_json_response_with_status('/api/session', urlencode(authdata))
if rsp.status != 200:
return None
self.csrftok = rsp['CSRFToken']
@ -88,7 +93,7 @@ class NodeHandler(generic.NodeHandler):
return wc
authdata['username'] = self.targuser
authdata['password'] = self.targpass
rsp, status = wc.grab_json_response_with_status('/api/session', urllib.urlencode(authdata))
rsp, status = wc.grab_json_response_with_status('/api/session', urlencode(authdata))
if status != 200:
return None
self.curruser = self.targuser
@ -107,6 +112,8 @@ class NodeHandler(generic.NodeHandler):
cd = creds.get(nodename, {})
user, passwd, _ = self.get_node_credentials(
nodename, creds, self.DEFAULT_USER, self.DEFAULT_PASS)
user = util.stringify(user)
passwd = util.stringify(passwd)
self.targuser = user
self.targpass = passwd
wc = self._get_wc()
@ -149,6 +156,9 @@ class NodeHandler(generic.NodeHandler):
raise exc.NotImplementedException('IPv6 remote config TODO')
currnet = wc.grab_json_response('/api/settings/network')
for net in currnet:
if net['channel_number'] == self.channel and net['lan_enable'] == 0:
# ignore false indication and switch to 8 (dedicated)
self.channel = 8
if net['channel_number'] == self.channel:
# we have found the interface to potentially manipulate
if net['ipv4_address'] != newip:
@ -162,6 +172,12 @@ class NodeHandler(generic.NodeHandler):
rsp, status = wc.grab_json_response_with_status(
'/api/settings/network/{0}'.format(net['id']), net, method='PUT')
break
elif self.ipaddr.startswith('fe80::'):
self.configmanager.set_node_attributes(
{nodename: {'hardwaremanagement.manager': self.ipaddr}})
else:
raise exc.TargetEndpointUnreachable(
'hardwaremanagement.manager must be set to desired address (No IPv6 Link Local detected)')
rsp, status = wc.grab_json_response_with_status('/api/session', method='DELETE')

View File

@ -13,6 +13,7 @@
# limitations under the License.
import base64
import codecs
import confluent.discovery.handlers.imm as immhandler
import confluent.netutil as netutil
import confluent.util as util
@ -31,7 +32,8 @@ getaddrinfo = eventlet.support.greendns.getaddrinfo
def fixup_uuid(uuidprop):
baduuid = ''.join(uuidprop.split())
uuidprefix = (baduuid[:8], baduuid[8:12], baduuid[12:16])
a = struct.pack('<IHH', *[int(x, 16) for x in uuidprefix]).encode('hex')
a = codecs.encode(struct.pack('<IHH', *[int(x, 16) for x in uuidprefix]), 'hex')
a = util.stringify(a)
uuid = (a[:8], a[8:12], a[12:16], baduuid[16:20], baduuid[20:])
return '-'.join(uuid).upper()
@ -125,8 +127,8 @@ class NodeHandler(immhandler.NodeHandler):
raise
return (None, None)
pwdchanged = False
adata = json.dumps({'username': username,
'password': password
adata = json.dumps({'username': util.stringify(username),
'password': util.stringify(password)
})
headers = {'Connection': 'keep-alive',
'Content-Type': 'application/json'}
@ -289,6 +291,8 @@ class NodeHandler(immhandler.NodeHandler):
userinfo = wc.grab_json_response('/api/dataset/imm_users')
curruser = None
uid = None
user = util.stringify(user)
passwd = util.stringify(passwd)
for userent in userinfo['items'][0]['users']:
if userent['users_user_name'] == user:
curruser = userent

View File

@ -34,8 +34,8 @@ pxearchs = {
def decode_uuid(rawguid):
lebytes = struct.unpack_from('<IHH', buffer(rawguid[:8]))
bebytes = struct.unpack_from('>HHI', buffer(rawguid[8:]))
lebytes = struct.unpack_from('<IHH', rawguid[:8])
bebytes = struct.unpack_from('>HHI', rawguid[8:])
return '{0:08X}-{1:04X}-{2:04X}-{3:04X}-{4:04X}{5:08X}'.format(
lebytes[0], lebytes[1], lebytes[2], bebytes[0], bebytes[1], bebytes[2]).lower()
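
The decode routine above handles SMBIOS-style GUIDs, whose first three fields are stored little-endian while the remainder is big-endian. A worked example of the same unpacking (the sample UUID is arbitrary):

import struct
import uuid

raw = uuid.UUID('00112233-4455-6677-8899-aabbccddeeff').bytes_le   # the mixed-endian on-wire form
lebytes = struct.unpack_from('<IHH', raw[:8])
bebytes = struct.unpack_from('>HHI', raw[8:])
decoded = '{0:08X}-{1:04X}-{2:04X}-{3:04X}-{4:04X}{5:08X}'.format(
    lebytes[0], lebytes[1], lebytes[2], bebytes[0], bebytes[1], bebytes[2]).lower()
assert decoded == '00112233-4455-6677-8899-aabbccddeeff'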
@ -98,7 +98,7 @@ def snoop(handler, protocol=None):
netaddr = ':'.join(['{0:02x}'.format(x) for x in netaddr])
optidx = 0
try:
optidx = rq.index('\x63\x82\x53\x63') + 4
optidx = rq.index(b'\x63\x82\x53\x63') + 4
except ValueError:
continue
uuid, arch = find_info_in_options(rq, optidx)

View File

@ -49,7 +49,6 @@ except AttributeError:
IPPROTO_IPV6 = 41 # Assume Windows value if socket is missing it
def _parse_slp_header(packet):
packet = bytearray(packet)
if len(packet) < 16 or packet[0] != 2:
@ -247,24 +246,23 @@ def _grab_rsps(socks, rsps, interval, xidmap):
def _parse_attrlist(attrstr):
attribs = {}
previousattrlen = None
attrstr = util.stringify(attrstr)
while attrstr:
if len(attrstr) == previousattrlen:
raise Exception('Looping in attrstr parsing')
previousattrlen = len(attrstr)
if attrstr[0] == '(':
if ')' not in attrstr:
attribs['INCOMPLETE'] = True
return attribs
currattr = attrstr[1:attrstr.index(')')]
if '=' not in currattr: # Not allegedly kosher, but still..
currattr = currattr.decode('utf-8')
attribs[currattr] = None
else:
attrname, attrval = currattr.split('=', 1)
attrname = attrname.decode('utf-8')
attribs[attrname] = []
for val in attrval.split(','):
try:
val = val.decode('utf-8')
except UnicodeDecodeError:
val = '*DECODEERROR*'
if val[:3] == '\\FF': # we should make this bytes
finalval = bytearray([])
for bnum in attrval[3:].split('\\'):
@ -274,9 +272,9 @@ def _parse_attrlist(attrstr):
val = finalval
if 'uuid' in attrname and len(val) == 16:
lebytes = struct.unpack_from(
'<IHH', buffer(val[:8]))
'<IHH', memoryview(val[:8]))
bebytes = struct.unpack_from(
'>HHI', buffer(val[8:]))
'>HHI', memoryview(val[8:]))
val = '{0:08X}-{1:04X}-{2:04X}-{3:04X}-' \
'{4:04X}{5:08X}'.format(
lebytes[0], lebytes[1], lebytes[2], bebytes[0],
@ -284,7 +282,7 @@ def _parse_attrlist(attrstr):
).lower()
attribs[attrname].append(val)
attrstr = attrstr[attrstr.index(')'):]
elif attrstr[0] == ',':
elif attrstr[0] == ','[0]:
attrstr = attrstr[1:]
elif ',' in attrstr:
currattr = attrstr[:attrstr.index(',')]

View File

@ -1,7 +1,7 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corporation
# Copyright 2015-2016 Lenovo
# Copyright 2015-2019 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -17,7 +17,10 @@
# This SCGI server provides a http wrap to confluent api
# It additionally manages httprequest console sessions
import base64
import Cookie
try:
import Cookie
except ModuleNotFoundError:
import http.cookies as Cookie
import confluent.auth as auth
import confluent.config.attributes as attribs
import confluent.consoleserver as consoleserver
@ -39,7 +42,10 @@ import socket
import sys
import traceback
import time
import urlparse
try:
import urlparse
except ModuleNotFoundError:
import urllib.parse as urlparse
import eventlet.wsgi
#scgi = eventlet.import_patched('flup.server.scgi')
tlvdata = confluent.tlvdata
@ -74,7 +80,7 @@ def group_creation_resources():
yield confluent.messages.ListAttributes(kv={'nodes': []},
desc='Nodes to add to the group'
).html() + '<br>\n'
for attr in sorted(attribs.node.iterkeys()):
for attr in sorted(attribs.node):
if attr == 'groups':
continue
if attr.startswith("secret."):
@ -95,7 +101,7 @@ def group_creation_resources():
def node_creation_resources():
yield confluent.messages.Attributes(
kv={'name': None}, desc="Name of the node").html() + '<br>'
for attr in sorted(attribs.node.iterkeys()):
for attr in sorted(attribs.node):
if attr.startswith("secret."):
yield confluent.messages.CryptedAttributes(
kv={attr: None},
@ -126,7 +132,7 @@ def user_creation_resources():
'description': (''),
},
}
for attr in sorted(credential.iterkeys()):
for attr in sorted(credential):
if attr == "password":
yield confluent.messages.CryptedAttributes(
kv={attr: None},
@ -176,7 +182,7 @@ def _get_query_dict(env, reqbody, reqtype):
if reqbody is not None:
if "application/x-www-form-urlencoded" in reqtype:
pbody = urlparse.parse_qs(reqbody, True)
for ky in pbody.iterkeys():
for ky in pbody:
if len(pbody[ky]) > 1: # e.g. REST explorer
na = [i for i in pbody[ky] if i != '']
qdict[ky] = na
@ -184,7 +190,7 @@ def _get_query_dict(env, reqbody, reqtype):
qdict[ky] = pbody[ky][0]
elif 'application/json' in reqtype:
pbody = json.loads(reqbody)
for key in pbody.iterkeys():
for key in pbody:
qdict[key] = pbody[key]
if 'restexplorerhonorkey' in qdict:
nqdict = {}
@ -305,7 +311,7 @@ def _authorize_request(env, operation):
return {'code': 401}
return ('logout',)
name, passphrase = base64.b64decode(
env['HTTP_AUTHORIZATION'].replace('Basic ', '')).split(':', 1)
env['HTTP_AUTHORIZATION'].replace('Basic ', '')).split(b':', 1)
authdata = auth.check_user_passphrase(name, passphrase, operation=operation, element=element)
if authdata is False:
return {'code': 403}
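
The b':' delimiter above matters because base64.b64decode always returns bytes on python3, so the credential split has to be done on bytes as well. A small illustration (the sample credentials are made up):

import base64

header = 'Basic ' + base64.b64encode(b'admin:s3cret').decode('utf-8')
name, passphrase = base64.b64decode(header.replace('Basic ', '')).split(b':', 1)
# name == b'admin', passphrase == b's3cret'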
@ -319,14 +325,14 @@ def _authorize_request(env, operation):
'inflight': set([])}
if 'HTTP_CONFLUENTAUTHTOKEN' in env:
httpsessions[sessid]['csrftoken'] = util.randomstring(32)
cookie['confluentsessionid'] = sessid
cookie['confluentsessionid'] = util.stringify(sessid)
cookie['confluentsessionid']['secure'] = 1
cookie['confluentsessionid']['httponly'] = 1
cookie['confluentsessionid']['path'] = '/'
skiplog = _should_skip_authlog(env)
if authdata:
auditmsg = {
'user': name,
'user': util.stringify(name),
'operation': operation,
'target': env['PATH_INFO'],
}
@ -338,7 +344,7 @@ def _authorize_request(env, operation):
if authdata[3] is not None:
auditmsg['tenant'] = authdata[3]
authinfo['tenant'] = authdata[3]
auditmsg['user'] = authdata[2]
auditmsg['user'] = util.stringify(authdata[2])
if sessid is not None:
authinfo['sessionid'] = sessid
if not skiplog:
@ -446,6 +452,8 @@ def resourcehandler_backend(env, start_response):
httpsessions[authorized['sessionid']]['inflight'])):
pagecontent += rsp
start_response("200 OK", headers)
if not isinstance(pagecontent, bytes):
pagecontent = pagecontent.encode('utf-8')
yield pagecontent
return
except exc.ConfluentException as e:
@ -491,7 +499,7 @@ def resourcehandler_backend(env, start_response):
auditmsg = {
'operation': 'start',
'target': env['PATH_INFO'],
'user': authorized['username'],
'user': util.stringify(authorized['username']),
}
if 'tenant' in authorized:
auditmsg['tenant'] = authorized['tenant']
@ -626,6 +634,7 @@ def resourcehandler_backend(env, start_response):
sessinfo = {'username': authorized['username']}
if 'authtoken' in authorized:
sessinfo['authtoken'] = authorized['authtoken']
tlvdata.unicode_dictvalues(sessinfo)
yield json.dumps(sessinfo)
return
resource = '.' + url[url.rindex('/'):]
@ -647,6 +656,8 @@ def resourcehandler_backend(env, start_response):
for datum in _assemble_json(hdlr, resource, url, extension):
pagecontent += datum
start_response('200 OK', headers)
if not isinstance(pagecontent, bytes):
pagecontent = pagecontent.encode('utf-8')
yield pagecontent
except exc.ConfluentException as e:
if ((not isinstance(e, exc.LockedCredentials)) and
@ -731,7 +742,7 @@ def _assemble_json(responses, resource=None, url=None, extension=None):
for rsp in responses:
if isinstance(rsp, confluent.messages.LinkRelation):
haldata = rsp.raw()
for hk in haldata.iterkeys():
for hk in haldata:
if 'href' in haldata[hk]:
if isinstance(haldata[hk]['href'], int):
haldata[hk]['href'] = str(haldata[hk]['href'])
@ -747,7 +758,7 @@ def _assemble_json(responses, resource=None, url=None, extension=None):
links[hk] = haldata[hk]
else:
rsp = rsp.raw()
for dk in rsp.iterkeys():
for dk in rsp:
if dk in rspdata:
if isinstance(rspdata[dk], list):
if isinstance(rsp[dk], list):
@ -766,8 +777,8 @@ def _assemble_json(responses, resource=None, url=None, extension=None):
rspdata[dk] = rsp[dk]
rspdata["_links"] = links
tlvdata.unicode_dictvalues(rspdata)
yield json.dumps(
rspdata, sort_keys=True, indent=4, ensure_ascii=False).encode('utf-8')
yield util.stringify(json.dumps(
rspdata, sort_keys=True, indent=4, ensure_ascii=False).encode('utf-8'))
def serve(bind_host, bind_port):

View File

@ -76,6 +76,10 @@ import stat
import struct
import time
import traceback
try:
unicode
except NameError:
unicode = str
daemonized = False
logfull = False
@ -176,6 +180,8 @@ class BaseRotatingHandler(object):
self.textfile = open(self.textpath, mode='ab')
if self.binfile is None:
self.binfile = open(self.binpath, mode='ab')
if not isinstance(textrecord, bytes):
textrecord = textrecord.encode('utf-8')
self.textfile.write(textrecord)
self.binfile.write(binrecord)
self.textfile.flush()
@ -594,7 +600,11 @@ class Logger(object):
if ltype == 2:
textrecord = data
else:
textrecord = textdate + data + ']'
if not isinstance(textdate, bytes):
textdate = textdate.encode('utf-8')
if not isinstance(data, bytes):
data = data.encode('utf-8')
textrecord = textdate + data + b']'
else:
textrecord = textdate + data
if not textrecord.endswith('\n'):
@ -742,7 +752,7 @@ class Logger(object):
pass
def log(self, logdata=None, ltype=None, event=0, eventdata=None):
if type(logdata) not in (str, unicode, dict):
if type(logdata) not in (bytes, unicode, dict):
raise Exception("Unsupported logdata")
if ltype is None:
if type(logdata) == dict:
@ -792,4 +802,4 @@ def logtrace():
if tracelog is None:
tracelog = Logger('trace', buffered=False)
tracelog.log(traceback.format_exc(), ltype=DataTypes.event,
event=Events.stacktrace)
event=Events.stacktrace)

View File

@ -43,9 +43,11 @@ except ImportError:
import confluent.discovery.core as disco
import eventlet
dbgif = False
if map(int, (eventlet.__version__.split('.'))) > [0, 18]:
try:
import eventlet.backdoor as backdoor
dbgif = True
except Exception:
pass
havefcntl = True
try:
import fcntl

View File

@ -25,6 +25,11 @@ from copy import deepcopy
from datetime import datetime
import json
try:
unicode
except NameError:
unicode = str
valid_health_values = set([
'ok',
'warning',
@ -54,7 +59,7 @@ def _htmlify_structure(indict):
if isinstance(indict, dict):
for key in sorted(indict):
ret += "<li>{0}: ".format(key)
if type(indict[key]) in (str, unicode, float, int):
if type(indict[key]) in (bytes, unicode, float, int):
ret += str(indict[key])
elif isinstance(indict[key], datetime):
ret += indict[key].strftime('%Y-%m-%dT%H:%M:%S')
@ -62,7 +67,7 @@ def _htmlify_structure(indict):
ret += _htmlify_structure(indict[key])
elif isinstance(indict, list):
if len(indict) > 0:
if type(indict[0]) in (str, unicode, None):
if type(indict[0]) in (bytes, unicode, None):
nd = []
for datum in indict:
if datum is None:
@ -121,14 +126,14 @@ class ConfluentMessage(object):
return self._generic_html_value(self.kvpairs)
if not self.stripped:
htmlout = ''
for node in self.kvpairs.iterkeys():
for node in self.kvpairs:
htmlout += '{0}:{1}\n'.format(
node, self._generic_html_value(self.kvpairs[node]))
return htmlout
def _generic_html_value(self, pairs):
snippet = ""
for key in pairs.iterkeys():
for key in pairs:
val = pairs[key]
value = self.defaultvalue
if isinstance(val, dict) and 'type' in val:
@ -156,7 +161,7 @@ class ConfluentMessage(object):
'<input type="checkbox" name="restexplorerhonorkey" '
'value="{1}">\r').format(valtype, key, self.desc)
return snippet
if (isinstance(val, bool) or isinstance(val, str) or
if (isinstance(val, bool) or isinstance(val, bytes) or
isinstance(val, unicode)):
value = str(val)
elif val is not None and 'value' in val:
@ -321,14 +326,14 @@ class ConfluentChoiceMessage(ConfluentMessage):
return self._create_option(self.kvpairs)
else:
htmlout = ''
for node in self.kvpairs.iterkeys():
for node in self.kvpairs:
htmlout += '{0}:{1}\n'.format(
node, self._create_option(self.kvpairs[node]))
return htmlout
def _create_option(self, pairdata):
snippet = ''
for key in pairdata.iterkeys():
for key in pairdata:
val = pairdata[key]
snippet += key + ':<select name="%s">' % key
valid_values = self.valid_values
@ -587,7 +592,7 @@ class InputConfigChangeSet(InputExpression):
endattrs = {}
for attr in attrs:
origval = attrs[attr]
if isinstance(origval, str) or isinstance(origval, unicode):
if isinstance(origval, bytes) or isinstance(origval, unicode):
origval = {'expression': origval}
if 'expression' not in origval:
endattrs[attr] = attrs[attr]
@ -614,7 +619,7 @@ class InputAttributes(ConfluentMessage):
if nodes is None:
self.attribs = inputdata
for attrib in self.attribs:
if type(self.attribs[attrib]) in (str, unicode):
if type(self.attribs[attrib]) in (bytes, unicode):
try:
# ok, try to use format against the string
# store back result to the attribute to
@ -640,7 +645,7 @@ class InputAttributes(ConfluentMessage):
return {}
nodeattr = deepcopy(self.nodeattribs[node])
for attr in nodeattr:
if type(nodeattr[attr]) in (str, unicode):
if type(nodeattr[attr]) in (bytes, unicode):
try:
# as above, use format() to see if string follows
# expression, store value back in case of escapes
@ -743,7 +748,7 @@ class InputCredential(ConfluentMessage):
if len(path) == 4:
inputdata['uid'] = path[-1]
# if the operation is 'create' check if all fields are present
if (isinstance(inputdata['uid'], str) and
if (type(inputdata['uid']) in (bytes, unicode) and
not inputdata['uid'].isdigit()):
inputdata['uid'] = inputdata['uid']
else:
@ -769,7 +774,7 @@ class InputCredential(ConfluentMessage):
return {}
credential = deepcopy(self.credentials[node])
for attr in credential:
if type(credential[attr]) in (str, unicode):
if type(credential[attr]) in (bytes, unicode):
try:
# as above, use format() to see if string follows
# expression, store value back in case of escapes
@ -1359,7 +1364,7 @@ class AlertDestination(ConfluentMessage):
class InputAlertDestination(ConfluentMessage):
valid_alert_params = {
'acknowledge': lambda x: False if type(x) in (unicode,str) and x.lower() == 'false' else bool(x),
'acknowledge': lambda x: False if type(x) in (unicode, bytes) and x.lower() == 'false' else bool(x),
'acknowledge_timeout': lambda x: int(x) if x and x.isdigit() else None,
'ip': lambda x: x,
'retries': lambda x: int(x)
@ -1573,7 +1578,7 @@ class Attributes(ConfluentMessage):
nkv = {}
self.notnode = name is None
for key in kv:
if type(kv[key]) in (str, unicode):
if type(kv[key]) in (bytes, unicode):
nkv[key] = {'value': kv[key]}
else:
nkv[key] = kv[key]
@ -1684,7 +1689,7 @@ class CryptedAttributes(Attributes):
# for now, just keep the dictionary keys and discard crypt value
self.desc = desc
nkv = {}
for key in kv.iterkeys():
for key in kv:
nkv[key] = {'isset': False}
try:
if kv[key] is not None and kv[key]['cryptvalue'] != '':

View File

@ -18,6 +18,7 @@
# Ultimately, this should use AF_NETLINK, but in the interest of time,
# use ip neigh for the moment
import confluent.util as util
import eventlet.green.subprocess as subprocess
import os
@ -39,6 +40,7 @@ def update_neigh():
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(neighdata, err) = ipn.communicate()
neighdata = util.stringify(neighdata)
for entry in neighdata.split('\n'):
entry = entry.split(' ')
if len(entry) < 5 or not entry[4]:

View File

@ -25,6 +25,11 @@ import itertools
import pyparsing as pp
import re
try:
range = xrange
except NameError:
pass
# construct custom grammar with pyparsing
_nodeword = pp.Word(pp.alphanums + '~^$/=-_:.*+!')
_nodebracket = pp.QuotedString(quoteChar='[', endQuoteChar=']',
@ -157,7 +162,7 @@ class NodeRange(object):
pieces = seqrange.split(delimiter)
if len(pieces) % 2 != 0:
return self.failorreturn(seqrange)
halflen = len(pieces) / 2
halflen = len(pieces) // 2
left = delimiter.join(pieces[:halflen])
right = delimiter.join(pieces[halflen:])
leftbits = _numextractor.parseString(left).asList()
@ -166,7 +171,7 @@ class NodeRange(object):
return self.failorreturn(seqrange)
finalfmt = ''
iterators = []
for idx in xrange(len(leftbits)):
for idx in range(len(leftbits)):
if leftbits[idx] == rightbits[idx]:
finalfmt += leftbits[idx]
elif leftbits[idx][0] in pp.alphas:
@ -181,7 +186,7 @@ class NodeRange(object):
if leftnum > rightnum:
width = len(rightbits[idx])
minnum = rightnum
maxnum = leftnum + 1 # xrange goes to n-1...
maxnum = leftnum + 1 # range goes to n-1...
elif rightnum > leftnum:
width = len(leftbits[idx])
minnum = leftnum
@ -189,7 +194,7 @@ class NodeRange(object):
else: # differently padded, but same number...
return self.failorreturn(seqrange)
numformat = '{0:0%d}' % width
for num in xrange(minnum, maxnum):
for num in range(minnum, maxnum):
curseq.append(numformat.format(num))
results = set([])
for combo in itertools.product(*iterators):
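
The padded expansion above relies on a zero-filled format string built from the observed field width, and range() behaves like the old xrange() here. In isolation, with illustrative values:

width = 2
numformat = '{0:0%d}' % width          # -> '{0:02}'
curseq = [numformat.format(num) for num in range(1, 11)]
# curseq == ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10']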
@ -222,7 +227,7 @@ class NodeRange(object):
if self.cfm is None:
raise Exception('Verification configmanager required')
return set(self.cfm.filter_node_attributes(element, filternodes))
for idx in xrange(len(element)):
for idx in range(len(element)):
if element[idx][0] == '[':
nodes = set([])
for numeric in NodeRange(element[idx][1:-1]).nodes:

View File

@ -0,0 +1,235 @@
# Pulled from:
# https://raw.githubusercontent.com/FirefighterBlu3/python-pam/fe44b334970f421635d9e373b563c9e6566613bd/pam.py
# and https://github.com/FirefighterBlu3/python-pam/pull/16/files
# (c) 2007 Chris AtLee <chris@atlee.ca>
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license.php
#
# Original author: Chris AtLee
#
# Modified by David Ford, 2011-12-6
# added py3 support and encoding
# added pam_end
# added pam_setcred to reset credentials after seeing Leon Walker's remarks
# added byref as well
# use readline to prestuff the getuser input
'''
PAM module for python
Provides an authenticate function that will allow the caller to authenticate
a user against the Pluggable Authentication Modules (PAM) on the system.
Implemented using ctypes, so no compilation is necessary.
'''
__all__ = ['pam']
__version__ = '1.8.4'
__author__ = 'David Ford <david@blue-labs.org>'
__released__ = '2018 June 15'
import sys
from ctypes import CDLL, POINTER, Structure, CFUNCTYPE, cast, byref, sizeof
from ctypes import c_void_p, c_size_t, c_char_p, c_char, c_int
from ctypes import memmove
from ctypes.util import find_library
class PamHandle(Structure):
"""wrapper class for pam_handle_t pointer"""
_fields_ = [ ("handle", c_void_p) ]
def __init__(self):
Structure.__init__(self)
self.handle = 0
class PamMessage(Structure):
"""wrapper class for pam_message structure"""
_fields_ = [ ("msg_style", c_int), ("msg", c_char_p) ]
def __repr__(self):
return "<PamMessage %i '%s'>" % (self.msg_style, self.msg)
class PamResponse(Structure):
"""wrapper class for pam_response structure"""
_fields_ = [ ("resp", c_char_p), ("resp_retcode", c_int) ]
def __repr__(self):
return "<PamResponse %i '%s'>" % (self.resp_retcode, self.resp)
conv_func = CFUNCTYPE(c_int, c_int, POINTER(POINTER(PamMessage)), POINTER(POINTER(PamResponse)), c_void_p)
class PamConv(Structure):
"""wrapper class for pam_conv structure"""
_fields_ = [ ("conv", conv_func), ("appdata_ptr", c_void_p) ]
# Various constants
PAM_PROMPT_ECHO_OFF = 1
PAM_PROMPT_ECHO_ON = 2
PAM_ERROR_MSG = 3
PAM_TEXT_INFO = 4
PAM_REINITIALIZE_CRED = 8
libc = CDLL(find_library("c"))
libpam = CDLL(find_library("pam"))
calloc = libc.calloc
calloc.restype = c_void_p
calloc.argtypes = [c_size_t, c_size_t]
# bug #6 (@NIPE-SYSTEMS), some libpam versions don't include this function
if hasattr(libpam, 'pam_end'):
pam_end = libpam.pam_end
pam_end.restype = c_int
pam_end.argtypes = [PamHandle, c_int]
pam_start = libpam.pam_start
pam_start.restype = c_int
pam_start.argtypes = [c_char_p, c_char_p, POINTER(PamConv), POINTER(PamHandle)]
pam_acct_mgmt = libpam.pam_acct_mgmt
pam_acct_mgmt.restype = c_int
pam_acct_mgmt.argtypes = [PamHandle, c_int]
pam_setcred = libpam.pam_setcred
pam_setcred.restype = c_int
pam_setcred.argtypes = [PamHandle, c_int]
pam_strerror = libpam.pam_strerror
pam_strerror.restype = c_char_p
pam_strerror.argtypes = [PamHandle, c_int]
pam_authenticate = libpam.pam_authenticate
pam_authenticate.restype = c_int
pam_authenticate.argtypes = [PamHandle, c_int]
class pam():
code = 0
reason = None
def __init__(self):
pass
def authenticate(self, username, password, service='login', encoding='utf-8', resetcreds=True):
"""username and password authentication for the given service.
Returns True for success, or False for failure.
self.code (integer) and self.reason (string) are always stored and may
be referenced for the reason why authentication failed. 0/'Success' will
be stored for success.
Python3 expects bytes() for ctypes inputs. This function will make
necessary conversions using the supplied encoding.
Inputs:
username: username to authenticate
password: password in plain text
service: PAM service to authenticate against, defaults to 'login'
Returns:
success: True
failure: False
"""
@conv_func
def my_conv(n_messages, messages, p_response, app_data):
"""Simple conversation function that responds to any
prompt where the echo is off with the supplied password"""
# Create an array of n_messages response objects
addr = calloc(n_messages, sizeof(PamResponse))
response = cast(addr, POINTER(PamResponse))
p_response[0] = response
for i in range(n_messages):
if messages[i].contents.msg_style == PAM_PROMPT_ECHO_OFF:
dst = calloc(len(password)+1, sizeof(c_char))
memmove(dst, cpassword, len(password))
response[i].resp = dst
response[i].resp_retcode = 0
return 0
# python3 ctypes prefers bytes
if sys.version_info >= (3,):
if isinstance(username, str): username = username.encode(encoding)
if isinstance(password, str): password = password.encode(encoding)
if isinstance(service, str): service = service.encode(encoding)
else:
if isinstance(username, unicode):
username = username.encode(encoding)
if isinstance(password, unicode):
password = password.encode(encoding)
if isinstance(service, unicode):
service = service.encode(encoding)
if b'\x00' in username or b'\x00' in password or b'\x00' in service:
self.code = 4 # PAM_SYSTEM_ERR in Linux-PAM
self.reason = 'strings may not contain NUL'
return False
# do this up front so we can safely throw an exception if there's
# anything wrong with it
cpassword = c_char_p(password)
handle = PamHandle()
conv = PamConv(my_conv, 0)
retval = pam_start(service, username, byref(conv), byref(handle))
if retval != 0:
# This is not an authentication error, something has gone wrong starting up PAM
self.code = retval
self.reason = "pam_start() failed"
return False
retval = pam_authenticate(handle, 0)
auth_success = retval == 0
if auth_success:
retval = pam_acct_mgmt(handle, 0)
auth_success = retval == 0
if auth_success and resetcreds:
retval = pam_setcred(handle, PAM_REINITIALIZE_CRED)
# store information to inform the caller why we failed
self.code = retval
self.reason = pam_strerror(handle, retval)
if sys.version_info >= (3,):
self.reason = self.reason.decode(encoding)
if hasattr(libpam, 'pam_end'):
pam_end(handle, retval)
return auth_success
def authenticate(*vargs, **dargs):
"""
Compatibility function for older versions of python-pam.
"""
return pam().authenticate(*vargs, **dargs)
if __name__ == "__main__":
import readline, getpass
def input_with_prefill(prompt, text):
def hook():
readline.insert_text(text)
readline.redisplay()
readline.set_pre_input_hook(hook)
if sys.version_info >= (3,):
result = input(prompt)
else:
result = raw_input(prompt)
readline.set_pre_input_hook()
return result
pam = pam()
username = input_with_prefill('Username: ', getpass.getuser())
# enter a valid username and an invalid/valid password, to verify both failure and success
pam.authenticate(username, getpass.getpass())
print('{} {}'.format(pam.code, pam.reason))

View File

@ -134,7 +134,7 @@ def retrieve_nodes(nodes, element, configmanager, inputdata):
attribute, {}).get('description', ''))
elif element[-1] == 'current':
for node in util.natural_sort(list(attributes)):
for attribute in sorted(attributes[node].iterkeys()):
for attribute in sorted(attributes[node]):
currattr = attributes[node][attribute]
try:
desc = allattributes.node[attribute]['description']
@ -164,6 +164,20 @@ def update(nodes, element, configmanager, inputdata):
def update_nodegroup(group, element, configmanager, inputdata):
if element == 'check':
check = inputdata.attribs
decrypt = configmanager.decrypt
configmanager.decrypt = True
currinfo = configmanager.get_nodegroup_attributes(group, list(check))
configmanager.decrypt = decrypt
for inf in check:
checkvalue = check[inf]
if isinstance(checkvalue, dict):
checkvalue = checkvalue.get('value', None)
currvalue = currinfo.get(inf, {}).get('value')
if checkvalue == currvalue:
raise exc.InvalidArgumentException('Checked value matches existing value')
return retrieve_nodegroup(group, element, configmanager, inputdata)
if 'rename' in element:
namemap = {}
namemap[group] = inputdata.attribs['rename']
@ -171,7 +185,7 @@ def update_nodegroup(group, element, configmanager, inputdata):
return yield_rename_resources(namemap, isnode=False)
try:
clearattribs = []
for attrib in inputdata.attribs.iterkeys():
for attrib in inputdata.attribs:
if inputdata.attribs[attrib] is None:
clearattribs.append(attrib)
for attrib in clearattribs:
@ -221,6 +235,18 @@ def update_nodes(nodes, element, configmanager, inputdata):
raise exc.InvalidArgumentException(
'No action to take, noderange is empty (if trying to define '
'group attributes, use nodegroupattrib)')
if element[-1] == 'check':
for node in nodes:
check = inputdata.get_attributes(node, allattributes.node)
currinfo = configmanager.get_node_attributes(node, list(check), decrypt=True)
for inf in check:
checkvalue = check[inf]
if isinstance(checkvalue, dict):
checkvalue = checkvalue.get('value', None)
currvalue = currinfo.get(node, {}).get(inf, {}).get('value')
if checkvalue == currvalue:
raise exc.InvalidArgumentException('Checked value matches existing value')
return retrieve(nodes, element, configmanager, inputdata)
if 'rename' in element:
namemap = {}
for node in nodes:
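
The check handling earlier in this hunk accepts an incoming attribute either as a bare value or as a {'value': ...} wrapper before comparing it to the stored value. A minimal sketch of that normalization (sample values made up):

def normalize(val):
    # attribute input may arrive as a bare value or wrapped in a dict
    if isinstance(val, dict):
        return val.get('value', None)
    return val

assert normalize('sha512') == normalize({'value': 'sha512'})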

View File

@ -383,7 +383,7 @@ def perform_requests(operator, nodes, element, cfg, inputdata, realop):
raise datum
if (hasattr(datum, 'kvpairs') and datum.kvpairs and
len(datum.kvpairs) == 1):
bundle.append((datum.kvpairs.keys()[0], datum))
bundle.append((list(datum.kvpairs)[0], datum))
numnodes -= 1
else:
yield datum
@ -491,8 +491,8 @@ class IpmiHandler(object):
# raise exc.TargetEndpointUnreachable(
# "Login process to " + connparams['bmc'] + " died")
except socket.gaierror as ge:
if ge[0] == -2:
raise exc.TargetEndpointUnreachable(ge[1])
if ge.errno == -2:
raise exc.TargetEndpointUnreachable(ge.strerror)
raise
self.ipmicmd = persistent_ipmicmds[(node, tenant)]

View File

@ -274,7 +274,7 @@ def perform_requests(operator, nodes, element, cfg, inputdata, realop):
raise datum
if (hasattr(datum, 'kvpairs') and datum.kvpairs and
len(datum.kvpairs) == 1):
bundle.append((datum.kvpairs.keys()[0], datum))
bundle.append((list(datum.kvpairs)[0], datum))
numnodes -= 1
else:
yield datum
@ -376,8 +376,8 @@ class IpmiHandler(object):
self.loggedin = True
self.ipmicmd = persistent_ipmicmds[(node, tenant)]
except socket.gaierror as ge:
if ge[0] == -2:
raise exc.TargetEndpointUnreachable(ge[1])
if ge.errno == -2:
raise exc.TargetEndpointUnreachable(ge.strerror)
raise
self.ipmicmd = persistent_ipmicmds[(node, tenant)]

View File

@ -77,7 +77,7 @@ class HostKeyHandler(paramiko.client.MissingHostKeyPolicy):
class SshShell(conapi.Console):
def __init__(self, node, config, username='', password=''):
def __init__(self, node, config, username=b'', password=b''):
self.node = node
self.ssh = None
self.datacallback = None
@ -127,15 +127,15 @@ class SshShell(conapi.Console):
look_for_keys=False)
except paramiko.AuthenticationException:
self.inputmode = 0
self.username = ''
self.password = ''
self.username = b''
self.password = b''
self.datacallback('\r\nlogin as: ')
return
except paramiko.ssh_exception.NoValidConnectionsError as e:
self.datacallback(str(e))
self.inputmode = 0
self.username = ''
self.password = ''
self.username = b''
self.password = b''
self.datacallback('\r\nlogin as: ')
return
except cexc.PubkeyInvalid as pi:
@ -205,9 +205,9 @@ class SshShell(conapi.Console):
delidx = data.index(b'\x7f')
data = data[:delidx - 1] + data[delidx + 1:]
self.username += data
if '\r' in self.username:
self.username, self.password = self.username.split('\r')[:2]
lastdata = data.split('\r')[0]
if b'\r' in self.username:
self.username, self.password = self.username.split(b'\r')[:2]
lastdata = data.split(b'\r')[0]
if lastdata != '':
self.datacallback(lastdata)
self.datacallback('\r\nEnter password: ')
@ -223,9 +223,9 @@ class SshShell(conapi.Console):
delidx = data.index(b'\x7f')
data = data[:delidx - 1] + data[delidx + 1:]
self.password += data
if '\r' in self.password:
self.password = self.password.split('\r')[0]
self.datacallback('\r\n')
if b'\r' in self.password:
self.password = self.password.split(b'\r')[0]
self.datacallback(b'\r\n')
self.logon()
else:
self.shell.sendall(data)

View File

@ -89,7 +89,7 @@ class ExecConsole(conapi.Console):
stdin=slave, stdout=slave,
stderr=subprocess.PIPE, close_fds=True)
except OSError:
print "Unable to execute " + self.executable + " (permissions?)"
print("Unable to execute " + self.executable + " (permissions?)")
self.close()
return
os.close(slave)
@ -104,7 +104,7 @@ class ExecConsole(conapi.Console):
try:
os.close(self._master)
except OSError:
print "Error closing master of child process, ignoring"
print("Error closing master of child process, ignoring")
if self.subproc is None or self.subproc.poll() is not None:
return
self.subproc.terminate()

View File

@ -1,7 +1,7 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corporation
# Copyright 2015-2018 Lenovo
# Copyright 2015-2019 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -123,7 +123,8 @@ def sessionhdl(connection, authname, skipauth=False, cert=None):
if authdata:
cfm = authdata[1]
authenticated = True
send_data(connection, "Confluent -- v0 --")
# version 0 == original, version 1 == pickle3 allowed
send_data(connection, "Confluent -- v{0} --".format(sys.version_info[0] - 2))
while not authenticated: # prompt for name and passphrase
send_data(connection, {'authpassed': 0})
response = tlvdata.recv(connection)
@ -221,6 +222,7 @@ def process_request(connection, request, cfm, authdata, authname, skipauth):
auditmsg['tenant'] = authdata[3]
auditmsg['allowed'] = True
if _should_authlog(path, operation):
tlvdata.unicode_dictvalues(auditmsg)
auditlog.log(auditmsg)
try:
if operation == 'start':
@ -412,7 +414,7 @@ def _unixdomainhandler():
except OSError: # if file does not exist, no big deal
pass
if not os.path.isdir("/var/run/confluent"):
os.makedirs('/var/run/confluent', 0755)
os.makedirs('/var/run/confluent', 0o755)
unixsocket.bind("/var/run/confluent/api.sock")
os.chmod("/var/run/confluent/api.sock",
stat.S_IWOTH | stat.S_IROTH | stat.S_IWGRP |

View File

@ -1,5 +1,6 @@
from ctypes import *
from ctypes.util import find_library
import confluent.util as util
import grp
import pwd
import os
@ -18,6 +19,8 @@ def getgrouplist(name, gid, ng=32):
_getgrouplist.argtypes = [c_char_p, c_uint, POINTER(c_uint * ng), POINTER(c_int)]
glist = (c_uint * ng)()
nglist = c_int(ng)
if not isinstance(name, bytes):
name = name.encode('utf-8')
count = _getgrouplist(name, gid, byref(glist), byref(nglist))
if count < 0:
raise TooSmallException(nglist.value)
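
The encode above is needed because ctypes will only marshal bytes into a c_char_p on python3. A tiny illustration of the same guard (the name is made up):

from ctypes import c_char_p

name = 'operator'
if not isinstance(name, bytes):
    name = name.encode('utf-8')   # c_char_p rejects str on python3
arg = c_char_p(name)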
@ -27,6 +30,7 @@ def getgrouplist(name, gid, ng=32):
def grouplist(username):
username = util.stringify(username)
pent = pwd.getpwnam(username)
try:
groups = getgrouplist(pent.pw_name, pent.pw_gid)

View File

@ -27,6 +27,14 @@ import socket
import ssl
import struct
def stringify(instr):
# Normalize unicode and bytes to 'str', correcting for
# current python version
if isinstance(instr, bytes) and not isinstance(instr, str):
return instr.decode('utf-8', errors='replace')
elif not isinstance(instr, bytes) and not isinstance(instr, str):
return instr.encode('utf-8')
return instr
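
Assuming this module is importable as confluent.util (as the other files in this change do), the normalization can be exercised either way:

from confluent.util import stringify

assert stringify(b'node1') == 'node1'
assert stringify(u'node1') == 'node1'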
def list_interface_indexes():
# Getting the interface indexes in a portable manner
@ -67,11 +75,11 @@ def randomstring(length=20):
:param length: The number of characters to produce, defaults to 20
"""
chunksize = length / 4
chunksize = length // 4
if length % 4 > 0:
chunksize += 1
strval = base64.urlsafe_b64encode(os.urandom(chunksize * 3))
return strval[0:length-1]
return stringify(strval[0:length-1])
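
The floor division above keeps chunksize an integer (os.urandom rejects a non-integer count on python3), and the arithmetic still covers the requested length: for length=20, chunksize is 20 // 4 = 5, and base64 of 5 * 3 = 15 random bytes yields exactly 20 characters. A quick check along those lines:

import base64
import os

length = 20
chunksize = length // 4
if length % 4 > 0:
    chunksize += 1
strval = base64.urlsafe_b64encode(os.urandom(chunksize * 3))
assert len(strval) >= length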
def securerandomnumber(low=0, high=4294967295):
@ -101,7 +109,7 @@ def monotonic_time():
def get_certificate_from_file(certfile):
cert = open(certfile, 'rb').read()
cert = open(certfile, 'r').read()
inpemcert = False
prunedcert = ''
for line in cert.split('\n'):

View File

@ -12,7 +12,11 @@ Group: Development/Libraries
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot
Prefix: %{_prefix}
BuildArch: noarch
%if "%{dist}" == ".el8"
Requires: python3-pyghmi >= 1.0.34, python3-eventlet, python3-greenlet, python3-pycryptodomex >= 3.4.7, confluent_client, python3-pyparsing, python3-paramiko, python3-dns, python3-netifaces, python3-pyasn1 >= 0.2.3, python3-pysnmp >= 4.3.4, python3-pyte, python3-lxml, python3-eficompressor, python3-setuptools, python3-dateutil, python3-enum34, python3-asn1crypto, python3-cffi, python3-pyOpenSSL, python3-monotonic
%else
Requires: python-pyghmi >= 1.0.34, python-eventlet, python-greenlet, python-pycryptodomex >= 3.4.7, confluent_client, python-pyparsing, python-paramiko, python-dns, python-netifaces, python2-pyasn1 >= 0.2.3, python-pysnmp >= 4.3.4, python-pyte, python-lxml, python-eficompressor, python-setuptools, python-dateutil
%endif
Vendor: Jarrod Johnson <jjohnson2@lenovo.com>
Url: http://xcat.sf.net/
@ -23,10 +27,18 @@ Server for console management and systems management aggregation
%setup -n %{name}-%{version} -n %{name}-%{version}
%build
python setup.py build
%if "%{dist}" == ".el8"
python3 setup.py build
%else
python2 setup.py build
%endif
%install
python setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES.bare --install-purelib=/opt/confluent/lib/python --install-scripts=/opt/confluent/bin
%if "%{dist}" == ".el8"
python3 setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES.bare --install-purelib=/opt/confluent/lib/python --install-scripts=/opt/confluent/bin
%else
python2 setup.py install --single-version-externally-managed -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES.bare --install-purelib=/opt/confluent/lib/python --install-scripts=/opt/confluent/bin
%endif
for file in $(grep confluent/__init__.py INSTALLED_FILES.bare); do
rm $RPM_BUILD_ROOT/$file
done

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corporation

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2017 Lenovo

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
# Copyright 2017 Lenovo
#

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
import pyghmi.util.webclient as webclient
import json
import os

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
import pyghmi.util.webclient as webclient
import json
import os

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python2
import pyghmi.util.webclient as webclient
from xml.etree.ElementTree import fromstring
import os

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2
import collections
import os
import struct