# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2014 IBM Corporation
# Copyright 2015 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This SCGI server provides an HTTP wrap to the confluent API.
# It additionally manages HTTP request console sessions.
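
# The console session flow this module implements, as a client sees it
# (a sketch inferred from resourcehandler_backend below; <node> and <id>
# stand in for a managed node name and the opaque session id returned):
#   request /nodes/<node>/console/session with no 'session' parameter
#       -> '{"session": "<id>", "data": ""}'        (new session)
#   request with session=<id>&bytes=<keystrokes>
#       -> keystrokes written to the console, expiry pushed out 90 seconds
#   request with session=<id> and no 'bytes'
#       -> long-polls up to 45 seconds for buffered console output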

import base64
import Cookie
import confluent.auth as auth
import confluent.config.attributes as attribs
import confluent.consoleserver as consoleserver
import confluent.core as pluginapi
import confluent.exceptions as exc
import confluent.log as log
import confluent.messages
import confluent.tlvdata as tlvdata
import confluent.util as util
import copy
import eventlet
import eventlet.wsgi
import json
import socket
import time
import traceback
import urlparse

#scgi = eventlet.import_patched('flup.server.scgi')


auditlog = None
tracelog = None
consolesessions = {}
httpsessions = {}
opmap = {
    'POST': 'create',
    'GET': 'retrieve',
    'PUT': 'update',
    'DELETE': 'delete',
}


class RobustCookie(Cookie.SimpleCookie):
    # Overriding the name-mangled method is very bad form, but BaseCookie
    # has a terrible flaw: it raises CookieError on any malformed morsel
    # rather than skipping it.
    def _BaseCookie__set(self, K, rval, cval):
        try:
            super(RobustCookie, self)._BaseCookie__set(K, rval, cval)
        except Cookie.CookieError:
            # empty value if SimpleCookie rejects
            dict.__setitem__(self, K, Cookie.Morsel())
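
# Why that subclass matters, illustratively (the second cookie below is
# hypothetical): other software on the same host may set cookies whose
# names Python 2's SimpleCookie rejects outright, which would otherwise
# abort handling of the whole request:
#   cc = RobustCookie()
#   cc.load('confluentsessionid=abc123; weird:name=x')
#   # the offending morsel is left empty instead of load() raising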


def group_creation_resources():
    """Yield HTML form fields for defining a new node group."""
    yield confluent.messages.Attributes(
        kv={'name': None}, desc="Name of the group").html() + '<br>'
    yield confluent.messages.ListAttributes(kv={'nodes': []},
                                            desc='Nodes to add to the group'
                                            ).html() + '<br>\n'
    for attr in sorted(attribs.node.iterkeys()):
        if attr == 'groups':
            continue
        if attr.startswith("secret."):
            yield confluent.messages.CryptedAttributes(
                kv={attr: None},
                desc=attribs.node[attr]['description']).html() + '<br>\n'
        elif ('type' in attribs.node[attr] and
                list == attribs.node[attr]['type']):
            yield confluent.messages.ListAttributes(
                kv={attr: []},
                desc=attribs.node[attr]['description']).html() + '<br>\n'
        else:
            yield confluent.messages.Attributes(
                kv={attr: None},
                desc=attribs.node[attr]['description']).html() + '<br>\n'


def node_creation_resources():
    """Yield HTML form fields for defining a new node."""
    yield confluent.messages.Attributes(
        kv={'name': None}, desc="Name of the node").html() + '<br>'
    for attr in sorted(attribs.node.iterkeys()):
        if attr.startswith("secret."):
            yield confluent.messages.CryptedAttributes(
                kv={attr: None},
                desc=attribs.node[attr]['description']).html() + '<br>\n'
        elif ('type' in attribs.node[attr] and
                list == attribs.node[attr]['type']):
            yield confluent.messages.ListAttributes(
                kv={attr: []},
                desc=attribs.node[attr]['description']).html() + '<br>\n'
        else:
            yield confluent.messages.Attributes(
                kv={attr: None},
                desc=attribs.node[attr]['description']).html() + '<br>\n'


create_resource_functions = {
    '/nodes/': node_creation_resources,
    '/groups/': group_creation_resources,
}
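
# Hedged usage sketch: _assemble_html below drives these generators when it
# renders a collection page ('/nodes/' is a real key of the map above; the
# loop variable name is illustrative):
#   for formfield in create_resource_functions['/nodes/']():
#       yield formfield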


def _sessioncleaner():
    """Periodically cull expired HTTP and console sessions."""
    while True:
        currtime = time.time()
        for session in httpsessions.keys():
            if httpsessions[session]['expiry'] < currtime:
                del httpsessions[session]
        for session in consolesessions.keys():
            if consolesessions[session]['expiry'] < currtime:
                del consolesessions[session]
        eventlet.sleep(10)


def _get_query_dict(env, reqbody, reqtype):
    """Merge the query string and request body into one parameter dict."""
    qdict = {}
    try:
        qstring = env['QUERY_STRING']
    except KeyError:
        qstring = None
    if qstring:
        for qpair in qstring.split('&'):
            qkey, qvalue = qpair.split('=')
            qdict[qkey] = qvalue
    if reqbody is not None:
        if "application/x-www-form-urlencoded" in reqtype:
            pbody = urlparse.parse_qs(reqbody, True)
            for ky in pbody.iterkeys():
                if len(pbody[ky]) > 1:  # e.g. REST explorer
                    na = [i for i in pbody[ky] if i != '']
                    qdict[ky] = na
                else:
                    qdict[ky] = pbody[ky][0]
        elif 'application/json' in reqtype:
            pbody = json.loads(reqbody)
            for key in pbody.iterkeys():
                qdict[key] = pbody[key]
    if 'restexplorerhonorkey' in qdict:
        nqdict = {}
        for key in qdict:
            if key == 'restexplorerop':
                nqdict[key] = qdict['restexplorerop']
                continue
            if key in qdict['restexplorerhonorkey']:
                nqdict[key] = qdict[key]
        qdict = nqdict
    return qdict
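
# Illustrative input/output for _get_query_dict (a sketch; the attribute
# name is hypothetical, the filtering follows the restexplorerhonorkey
# logic above):
#   reqbody = 'restexplorerhonorkey=some.attr&some.attr=value&ignored=x'
#   reqtype = 'application/x-www-form-urlencoded'
#   _get_query_dict({}, reqbody, reqtype)  ->  {'some.attr': 'value'}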


def _authorize_request(env, operation):
    """Grant/deny access based on data from the wsgi env."""
    authdata = None
    name = ''
    cookie = Cookie.SimpleCookie()
    if 'HTTP_COOKIE' in env:
        # attempt to use the cookie; if it matches a live session, reuse it
        cc = RobustCookie()
        cc.load(env['HTTP_COOKIE'])
        if 'confluentsessionid' in cc:
            sessionid = cc['confluentsessionid'].value
            if sessionid in httpsessions:
                httpsessions[sessionid]['expiry'] = time.time() + 90
                name = httpsessions[sessionid]['name']
                authdata = auth.authorize(
                    name, element=None,
                    skipuserobj=httpsessions[sessionid]['skipuserobject'])
    if (not authdata) and 'HTTP_AUTHORIZATION' in env:
        name, passphrase = base64.b64decode(
            env['HTTP_AUTHORIZATION'].replace('Basic ', '')).split(':', 1)
        authdata = auth.check_user_passphrase(name, passphrase, element=None)
        if not authdata:
            return {'code': 401}
        sessid = util.randomstring(32)
        while sessid in httpsessions:
            sessid = util.randomstring(32)
        httpsessions[sessid] = {'name': name, 'expiry': time.time() + 90,
                                'skipuserobject': authdata[4]}
        cookie['confluentsessionid'] = sessid
        cookie['confluentsessionid']['secure'] = 1
        cookie['confluentsessionid']['httponly'] = 1
        cookie['confluentsessionid']['path'] = '/'
    skiplog = False
    if '/console/session' in env['PATH_INFO']:
        skiplog = True
    if authdata:
        auditmsg = {
            'user': name,
            'operation': operation,
            'target': env['PATH_INFO'],
        }
        authinfo = {'code': 200,
                    'cookie': cookie,
                    'cfgmgr': authdata[1],
                    'username': authdata[2],
                    'userdata': authdata[0]}
        if authdata[3] is not None:
            auditmsg['tenant'] = authdata[3]
            authinfo['tenant'] = authdata[3]
        auditmsg['user'] = authdata[2]
        if not skiplog:
            auditlog.log(auditmsg)
        return authinfo
    else:
        return {'code': 401}
    # TODO(jbjohnso): actually evaluate the request for authorization
    # In theory, the x509 or http auth stuff will get translated and then
    # passed on to the core authorization function in an appropriate form.
    # The return is expressed in the form of an http code:
    # 401 if there is no known identity
    # 403 if valid identity, but no access
    # going to run 200 just to get going for now
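
# Shape of a successful _authorize_request result, for reference (keys as
# assembled above; the authdata tuple indices follow confluent.auth):
#   {'code': 200, 'cookie': <SimpleCookie>, 'cfgmgr': <config manager>,
#    'username': <str>, 'userdata': <user object>,
#    'tenant': <present only when tenanted>}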


def _pick_mimetype(env):
    """Detect the mime type the client asked us to send back.

    Note that when it gets into ACCEPT header honoring, it only looks for
    application/json and otherwise gives up and assumes html.  This is
    because browsers are very chaotic about the ACCEPT header.  It is
    assumed that XMLHttpRequest.setRequestHeader will be used by clever
    javascript if the '.json' scheme doesn't cut it.
    """
    if env['PATH_INFO'].endswith('.json'):
        return 'application/json; charset=utf-8', '.json'
    elif env['PATH_INFO'].endswith('.html'):
        return 'text/html', '.html'
    elif 'application/json' in env.get('HTTP_ACCEPT', ''):
        # tolerate a missing Accept header rather than raising KeyError
        return 'application/json; charset=utf-8', ''
    else:
        return 'text/html', ''
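
# e.g. (paths illustrative):
#   '/nodes/n1/power/state.json'      -> ('application/json; charset=utf-8', '.json')
#   '/nodes/' with Accept: text/html  -> ('text/html', '')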


def _assign_consessionid(consolesession):
    """Register a console session under a fresh random session id."""
    sessid = util.randomstring(32)
    while sessid in consolesessions.keys():
        sessid = util.randomstring(32)
    consolesessions[sessid] = {'session': consolesession,
                               'expiry': time.time() + 60}
    return sessid


def resourcehandler(env, start_response):
    """Trap and log any unhandled exception from the real handler."""
    try:
        for rsp in resourcehandler_backend(env, start_response):
            yield rsp
    except Exception:
        tracelog.log(traceback.format_exc(), ltype=log.DataTypes.event,
                     event=log.Events.stacktrace)
        start_response('500 - Internal Server Error', [])
        yield '500 - Internal Server Error'
        return


def resourcehandler_backend(env, start_response):
    """Handle each new wsgi request."""
    mimetype, extension = _pick_mimetype(env)
    reqbody = None
    reqtype = None
    if 'CONTENT_LENGTH' in env and int(env['CONTENT_LENGTH']) > 0:
        reqbody = env['wsgi.input'].read(int(env['CONTENT_LENGTH']))
        reqtype = env['CONTENT_TYPE']
    operation = opmap[env['REQUEST_METHOD']]
    querydict = _get_query_dict(env, reqbody, reqtype)
    if 'restexplorerop' in querydict:
        operation = querydict['restexplorerop']
        del querydict['restexplorerop']
    authorized = _authorize_request(env, operation)
    if authorized['code'] == 401:
        start_response(
            '401 Authentication Required',
            [('Content-type', 'text/plain'),
             ('WWW-Authenticate', 'Basic realm="confluent"')])
        yield 'authentication required'
        return
    if authorized['code'] == 403:
        start_response(
            '403 Forbidden',
            [('Content-type', 'text/plain'),
             ('WWW-Authenticate', 'Basic realm="confluent"')])
        yield 'authorization failed'
        return
    if authorized['code'] != 200:
        raise Exception("Unrecognized code from auth engine")
    headers = [('Content-Type', mimetype)]
    headers.extend(
        ("Set-Cookie", m.OutputString())
        for m in authorized['cookie'].values())
    cfgmgr = authorized['cfgmgr']
    if '/console/session' in env['PATH_INFO']:
        # hard bake JSON into this path, do not support other incarnations
        prefix, _, _ = env['PATH_INFO'].partition('/console/session')
        _, _, nodename = prefix.rpartition('/')
        if 'session' not in querydict.keys() or not querydict['session']:
            auditmsg = {
                'operation': 'start',
                'target': env['PATH_INFO'],
                'user': authorized['username'],
            }
            if 'tenant' in authorized:
                auditmsg['tenant'] = authorized['tenant']
            auditlog.log(auditmsg)
            # Request for new session
            skipreplay = False
            if 'skipreplay' in querydict and querydict['skipreplay']:
                skipreplay = True
            try:
                consession = consoleserver.ConsoleSession(
                    node=nodename, configmanager=cfgmgr,
                    username=authorized['username'], skipreplay=skipreplay)
            except exc.NotFoundException:
                start_response("404 Not found", headers)
                yield "404 - Request Path not recognized"
                return
            if not consession:
                start_response("500 Internal Server Error", headers)
                return
            sessid = _assign_consessionid(consession)
            start_response('200 OK', headers)
            yield '{"session":"%s","data":""}' % sessid
            return
        elif 'bytes' in querydict.keys():  # not keycodes...
            myinput = querydict['bytes']
            sessid = querydict['session']
            if sessid not in consolesessions:
                start_response('400 Expired Session', headers)
                return
            consolesessions[sessid]['expiry'] = time.time() + 90
            consolesessions[sessid]['session'].write(myinput)
            start_response('200 OK', headers)
            yield json.dumps({'session': querydict['session']})
            return  # client has requests to send or receive, not both...
        else:  # no keys, but a session, means it's hooking to receive data
            sessid = querydict['session']
            if sessid not in consolesessions:
                start_response('400 Expired Session', headers)
                return
            consolesessions[sessid]['expiry'] = time.time() + 90
            outdata = consolesessions[sessid]['session'].get_next_output(
                timeout=45)
            bufferage = False
            if 'stampsent' not in consolesessions[sessid]:
                consolesessions[sessid]['stampsent'] = True
                bufferage = consolesessions[sessid]['session'].get_buffer_age()
            if isinstance(outdata, dict):
                rspdata = outdata
                rspdata['session'] = querydict['session']
            else:
                rspdata = {'session': querydict['session'],
                           'data': outdata}
            if bufferage is not False:
                rspdata['bufferage'] = bufferage
            try:
                rsp = json.dumps(rspdata)
            except UnicodeDecodeError:
                try:
                    rsp = json.dumps(rspdata, encoding='cp437')
                except UnicodeDecodeError:
                    rsp = json.dumps({'session': querydict['session'],
                                      'data': 'DECODEERROR'})
            start_response('200 OK', headers)
            yield rsp
            return
    else:
        # normal request
        url = env['PATH_INFO']
        url = url.replace('.json', '')
        url = url.replace('.html', '')
        resource = '.' + url[url.rindex('/'):]
        lquerydict = copy.deepcopy(querydict)
        try:
            hdlr = pluginapi.handle_path(url, operation,
                                         cfgmgr, querydict)
            pagecontent = ""
            if mimetype == 'text/html':
                for datum in _assemble_html(hdlr, resource, lquerydict, url,
                                            extension):
                    pagecontent += datum
            else:
                for datum in _assemble_json(hdlr, resource, url, extension):
                    pagecontent += datum
            start_response('200 OK', headers)
            yield pagecontent
        except exc.NotFoundException as ne:
            start_response('404 Not found', headers)
            yield "404 - Request path not recognized - " + str(ne)
        except exc.InvalidArgumentException as e:
            start_response('400 Bad Request - ' + str(e), headers)
            yield '400 - Bad Request - ' + str(e)
        except exc.TargetEndpointUnreachable as tu:
            start_response('504 Unreachable Target', headers)
            yield '504 - Unreachable Target - ' + str(tu)
        except exc.TargetEndpointBadCredentials:
            start_response('502 Bad Credentials', headers)
            yield '502 - Bad Credentials'
        except exc.NotImplementedException:
            start_response('501 Not Implemented', headers)
            yield '501 Not Implemented'


def _assemble_html(responses, resource, querydict, url, extension):
    """Render handler responses as the REST explorer HTML form."""
    yield '<html><head><meta charset="UTF-8"><title>' \
          'Confluent REST Explorer: ' + url + '</title></head>' \
          '<body><form action="' + \
          resource + '" method="post">'
    if querydict:
        yield 'Response to input data:<br>' + \
              json.dumps(querydict, separators=(',', ': '),
                         indent=4, sort_keys=True) + '<hr>'
    yield 'Only fields that have their boxes checked will have their ' \
          'respective values honored by the confluent server.<hr>' \
          '<input type="hidden" name="restexplorerhonorkey" value="">' + \
          '<a rel="self" href="{0}{1}">{0}{1}</a><br>'.format(
              resource, extension)
    if url == '/':
        iscollection = True
    elif resource[-1] == '/':
        iscollection = True
        yield '<a rel="collection" href="../{0}">../{0}</a><br>'.format(
            extension)
    else:
        iscollection = False
        yield '<a rel="collection" href="./{0}">./{0}</a><br>'.format(
            extension)
    pendingrsp = []
    for rsp in responses:
        if isinstance(rsp, confluent.messages.LinkRelation):
            yield rsp.html(extension) + "<br>"
        else:
            pendingrsp.append(rsp)
    for rsp in pendingrsp:
        yield rsp.html() + "<br>"
    if iscollection:
        # localpath = url[:-2] (why was this here??)
        try:
            firstpass = True
            for y in create_resource_functions[url]():
                if firstpass:
                    yield "<hr>Define new resource in %s:<BR>" % \
                          url.split("/")[-2]
                    firstpass = False
                yield y
            yield ('<input value="create" name="restexplorerop" type="submit">'
                   '</form></body></html>')
        except KeyError:
            pass
    else:
        yield ('<input value="update" name="restexplorerop" type="submit">'
               '</form></body></html>')


def _assemble_json(responses, resource, url, extension):
    """Render handler responses as a single HAL-style JSON document."""
    # NOTE(jbjohnso): I'm considering giving up on yielding bit by bit
    # in the json case over http.  Notably, duplicate key values from plugin
    # overwrite, but we'd want to preserve them into an array instead.
    # The downside is that http would just always blurt it all out at
    # once and hold on to all the data in memory.
    links = {
        'self': {"href": resource + extension},
    }
    if url == '/':
        pass
    elif resource[-1] == '/':
        links['collection'] = {"href": "../" + extension}
    else:
        links['collection'] = {"href": "./" + extension}
    rspdata = {}
    for rsp in responses:
        if isinstance(rsp, confluent.messages.LinkRelation):
            haldata = rsp.raw()
            for hk in haldata.iterkeys():
                if 'href' in haldata[hk]:
                    haldata[hk]['href'] += extension
                if hk in links:
                    if isinstance(links[hk], list):
                        links[hk].append(haldata[hk])
                    else:
                        links[hk] = [links[hk], haldata[hk]]
                else:
                    links[hk] = haldata[hk]
        else:
            rsp = rsp.raw()
            for dk in rsp.iterkeys():
                if dk in rspdata:
                    if isinstance(rspdata[dk], list):
                        rspdata[dk].append(rsp[dk])
                    else:
                        rspdata[dk] = [rspdata[dk], rsp[dk]]
                else:
                    rspdata[dk] = rsp[dk]
    rspdata["_links"] = links
    tlvdata.unicode_dictvalues(rspdata)
    yield json.dumps(
        rspdata, sort_keys=True, indent=4, ensure_ascii=False).encode('utf-8')
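
# A sketch of the JSON this emits (all fields other than "_links" depend on
# the handler's responses; hrefs shown assume a '.json' extension request):
# {
#     "example": "value",
#     "_links": {
#         "collection": {"href": "../.json"},
#         "self": {"href": "./thing.json"}
#     }
# }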


def serve():
    # TODO(jbjohnso): move to unix socket and explore either making apache
    # deal with it or just supporting nginx or lighttpd;
    # for now, http port access
    # scgi.WSGIServer(resourcehandler, bindAddress=("localhost", 4004)).run()
    # Based on a performance bakeoff, proxied eventlet http support actually
    # edged out patched flup.  Unpatched flup was about the same as eventlet
    # http, but deps are simpler without flup, and the potential for direct
    # http can be handy.
    # TODO: a unix domain socket for even http remains to be done
    eventlet.wsgi.server(
        eventlet.listen(('::', 4005, 0, 0), family=socket.AF_INET6),
        resourcehandler, log=False, log_output=False, debug=False)
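
# Hedged usage sketch: with the server running, the API answers plain HTTP
# on port 4005 (per the listen call above); in a real deployment a TLS
# terminator would normally sit in front.  Credentials are hypothetical:
#   curl -u admin:password 'http://[::1]:4005/nodes/.json'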


class HttpApi(object):
    def __init__(self):
        self.server = None

    def start(self):
        global auditlog
        global tracelog
        tracelog = log.Logger('trace')
        auditlog = log.Logger('audit')
        self.server = eventlet.spawn(serve)


_cleaner = eventlet.spawn(_sessioncleaner)