2
0
mirror of https://github.com/xcat2/confluent.git synced 2024-11-25 11:01:09 +00:00

Create auth multiprocessing pool on demand

Most of the time, we don't need this pool.  Create when needed,
and clean up after 30 seconds of inactivity.  This avoids a slow
shutdown that was due to core python hanging in _help_stuff_finish,
and as a bonus means most of the time, one only sees one confluent
process, which has been a source of questions already.
This commit is contained in:
Jarrod Johnson 2017-03-05 08:28:35 -05:00
parent eb18796d94
commit 919dab9b55
2 changed files with 16 additions and 12 deletions

View File

@@ -37,6 +37,7 @@ _passcache = {}
_passchecking = {}
authworkers = None
authcleaner = None
class Credentials(object):
@@ -195,6 +196,13 @@ def check_user_passphrase(name, passphrase, element=None, tenant=False):
#such a beast could be passed into pyghmi as a way for pyghmi to
#magically get offload of the crypto functions without having
#to explicitly get into the eventlet tpool game
global authworkers
global authcleaner
if authworkers is None:
authworkers = multiprocessing.Pool(processes=1)
else:
authcleaner.cancel()
authcleaner = eventlet.spawn_after(30, _clean_authworkers)
crypted = eventlet.tpool.execute(_do_pbkdf, passphrase, salt)
del _passchecking[(user, tenant)]
eventlet.sleep(0.05) # either way, we want to stall so that client can't
@@ -211,19 +219,16 @@ def _apply_pbkdf(passphrase, salt):
lambda p, s: hmac.new(p, s, hashlib.sha256).digest())
def _clean_authworkers():
    """Tear down the on-demand auth worker pool after inactivity.

    Scheduled via eventlet.spawn_after() from check_user_passphrase();
    dropping the module-level references lets the next authentication
    request lazily recreate the pool.
    """
    global authworkers
    global authcleaner
    if authworkers is not None:
        # Explicitly close the pool so its worker process exits now,
        # instead of lingering until the Pool object happens to be
        # garbage-collected.
        authworkers.close()
    authworkers = None
    authcleaner = None
def _do_pbkdf(passphrase, salt):
    """Derive the PBKDF2 result for *passphrase*/*salt* on the auth pool.

    The CPU-heavy derivation is shipped to the dedicated worker process
    so it does not block this process; the caller, however, needs the
    digest before it can continue, which makes the blocking Pool.apply()
    — rarely the right call — exactly the right one here.
    """
    job_args = [passphrase, salt]
    return authworkers.apply(_apply_pbkdf, job_args)
def init_auth():
    """Create the dedicated worker pool used for password hashing.

    Auth work is kept apart from the general worker population so that
    unauthenticated clients cannot starve out productive work.
    """
    global authworkers
    # A single worker has been sufficient so far; grow the pool only if
    # real demand for concurrent auth checks ever appears.
    authworkers = multiprocessing.Pool(processes=1)
return authworkers.apply(_apply_pbkdf, [passphrase, salt])

View File

@@ -222,7 +222,6 @@ def run():
_daemonize()
if havefcntl:
_updatepidfile()
auth.init_auth()
signal.signal(signal.SIGINT, terminate)
signal.signal(signal.SIGTERM, terminate)
if dbgif: