Compare commits

...

22 Commits

SHA1 Message Date
d6d215d53d Improve boolean HTTP parameter handling 2013-07-15 14:38:28 -04:00
e02143ddb2 Remove duplicated test 2013-07-14 15:30:53 -04:00
e275384d03 Fix WSGI docs again 2013-07-11 16:36:32 -04:00
a6a67ec15c Update WSGI docs 2013-07-10 14:16:25 -04:00
fc43107307 Fill out test coverage 2013-07-09 19:06:26 -04:00
90633413bb Add nilmdb.utils.interval.human_string function 2013-07-09 19:01:53 -04:00
c7c3aff0fb Add nilmdb.utils.interval.optimize function 2013-07-09 17:50:21 -04:00
e2347c954e Split more CherryPy stuff into serverutil 2013-07-02 11:44:08 -04:00
222a5c6c53 Move server decorators and other utilities to a separate file
This will help with implementing nilmrun.
2013-07-02 11:32:19 -04:00
1ca2c143e5 Fix typo 2013-06-29 12:39:00 -04:00
b5df575c79 Fix tests 2013-05-09 22:27:10 -04:00
2768a5ad15 Show FQDN rather than hostname. 2013-05-09 13:33:05 -04:00
a105543c38 Show a more helpful message at the root nilmdb path 2013-05-09 13:30:10 -04:00
309f38d0ed Merge branch '32bit' 2013-05-08 17:20:31 -04:00
9a27b6ef6a Make rocket code suitable for 32-bit architectures 2013-05-08 16:35:32 -04:00
99532cf9e0 Fix coverage 2013-05-07 23:00:44 -04:00
dfdd0e5c74 Fix line parsing in http client 2013-05-07 22:56:00 -04:00
9a2699adfc Attempt at fixing up more Unicode issues with metadata. 2013-05-07 13:44:03 -04:00
9bbb95b18b Add unicode decode/encode helpers 2013-05-07 12:56:59 -04:00
6bbed322c5 Fix unicode in completion 2013-05-07 12:49:12 -04:00
2317894355 Tweak cache sizes to account for large numbers of decimated tables 2013-05-06 11:54:57 -04:00
539c92226c Add more disk space info 2013-05-06 11:36:28 -04:00
18 changed files with 457 additions and 257 deletions

View File

@@ -19,12 +19,12 @@ Then, set up Apache with a configuration like:
 <VirtualHost>
     WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
-    WSGIApplicationGroup nilmdb-appgroup
-    WSGIProcessGroup nilmdb-procgroup
     WSGIDaemonProcess nilmdb-procgroup threads=32 user=nilm group=nilm
-    # Access control example:
     <Location /nilmdb>
+        WSGIProcessGroup nilmdb-procgroup
+        WSGIApplicationGroup nilmdb-appgroup
+        # Access control example:
         Order deny,allow
         Deny from all
         Allow from 1.2.3.4

View File

@@ -123,14 +123,36 @@ class HTTPClient(object):
         """
         (response, isjson) = self._do_req(method, url, query, body,
                                           stream = True, headers = headers)
+
+        # Like the iter_lines function in Requests, but only splits on
+        # the specified line ending.
+        def lines(source, ending):
+            pending = None
+            for chunk in source:
+                if pending is not None:
+                    chunk = pending + chunk
+                tmp = chunk.split(ending)
+                lines = tmp[:-1]
+                if chunk.endswith(ending):
+                    pending = None
+                else:
+                    pending = tmp[-1]
+                for line in lines:
+                    yield line
+            if pending is not None: # pragma: no cover (missing newline)
+                yield pending
+
+        # Yield the chunks or lines as requested
         if binary:
             for chunk in response.iter_content(chunk_size = 65536):
                 yield chunk
         elif isjson:
-            for line in response.iter_lines():
+            for line in lines(response.iter_content(chunk_size = 1),
+                              ending = '\r\n'):
                 yield json.loads(line)
         else:
-            for line in response.iter_lines():
+            for line in lines(response.iter_content(chunk_size = 65536),
+                              ending = '\n'):
                 yield line

     def get_gen(self, url, params = None, binary = False):
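
The new lines() helper differs from requests' iter_lines() in that it splits only on the exact terminator given, so stray "\r" or "\n" bytes inside a value are left alone. A minimal standalone sketch of the same splitting logic (the chunk values below are made up; the real caller feeds it response.iter_content()):

    def lines(source, ending):
        # Buffer data until the chosen terminator appears; anything after
        # the last terminator stays pending until more data (or EOF) arrives.
        pending = None
        for chunk in source:
            if pending is not None:
                chunk = pending + chunk
            parts = chunk.split(ending)
            pending = None if chunk.endswith(ending) else parts[-1]
            for line in parts[:-1]:
                yield line
        if pending is not None:
            yield pending

    # A chunk boundary can even fall inside the "\r\n" terminator:
    print(list(lines(iter(["a\r", "\nb\r\nc"]), "\r\n")))   # ['a', 'b', 'c']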

View File

@@ -72,10 +72,16 @@ class Complete(object): # pragma: no cover
         path = parsed_args.path
         if not path:
             return []
-        return ( self.escape(k + '=' + v)
-                 for (k,v) in client.stream_get_metadata(path).iteritems()
-                 if k.startswith(prefix) )
+        results = []
+        # prefix comes in as UTF-8, but results need to be Unicode,
+        # weird.  Still doesn't work in all cases, but that's bugs in
+        # argcomplete.
+        prefix = nilmdb.utils.unicode.decode(prefix)
+        for (k,v) in client.stream_get_metadata(path).iteritems():
+            kv = self.escape(k + '=' + v)
+            if kv.startswith(prefix):
+                results.append(kv)
+        return results

 class Cmdline(object):
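
The rewritten completer does the same filtering as the old generator expression, but first decodes the prefix so startswith() compares Unicode with Unicode. A rough illustration of that matching (escape() and the metadata fetch are omitted; the dictionary is hypothetical):

    prefix = nilmdb.utils.unicode.decode("locat")   # argcomplete hands over UTF-8 bytes
    metadata = { u"location": u"lab", u"wattage": u"120" }
    results = []
    for (k, v) in metadata.iteritems():
        kv = k + u"=" + v
        if kv.startswith(prefix):
            results.append(kv)
    # results == [u'location=lab']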

View File

@@ -21,5 +21,8 @@ def cmd_info(self):
     printf("Server URL: %s\n", self.client.geturl())
     dbinfo = self.client.dbinfo()
     printf("Server database path: %s\n", dbinfo["path"])
-    printf("Server database size: %s\n", human_size(dbinfo["size"]))
-    printf("Server database free space: %s\n", human_size(dbinfo["free"]))
+    for (desc, field) in [("used by NilmDB", "size"),
+                          ("used by other", "other"),
+                          ("reserved", "reserved"),
+                          ("free", "free")]:
+        printf("Server disk space %s: %s\n", desc, human_size(dbinfo[field]))
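
The four figures printed here come from the client's dbinfo() call; the server side (later in this compare) derives them from nilmdb.utils.du() and psutil.disk_usage(). A rough sketch of that relationship, with a hypothetical path and database size:

    import psutil

    path = "/home/nilm/db"             # hypothetical database location
    usage = psutil.disk_usage(path)
    dbsize = 157286400                 # stand-in for nilmdb.utils.du(path)

    dbinfo = { "path": path,
               "size": dbsize,                                     # used by NilmDB
               "other": usage.used - dbsize,                       # used by other
               "reserved": usage.total - usage.used - usage.free,  # reserved
               "free": usage.free }                                # free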

View File

@@ -41,10 +41,10 @@ def cmd_metadata(self):
     if self.args.set is not None or self.args.update is not None:
         # Either set, or update
         if self.args.set is not None:
-            keyvals = self.args.set
+            keyvals = map(nilmdb.utils.unicode.decode, self.args.set)
             handler = self.client.stream_set_metadata
         else:
-            keyvals = self.args.update
+            keyvals = map(nilmdb.utils.unicode.decode, self.args.update)
             handler = self.client.stream_update_metadata

         # Extract key=value pairs
@@ -62,7 +62,9 @@ def cmd_metadata(self):
             self.die("error setting/updating metadata: %s", str(e))
     elif self.args.delete is not None:
         # Delete (by setting values to empty strings)
-        keys = self.args.delete or None
+        keys = None
+        if self.args.delete:
+            keys = map(nilmdb.utils.unicode.decode, self.args.delete)
         try:
             data = self.client.stream_get_metadata(self.args.path, keys)
             for key in data:
@@ -72,7 +74,9 @@ def cmd_metadata(self):
             self.die("error deleting metadata: %s", str(e))
     else:
         # Get (or unspecified)
-        keys = self.args.get or None
+        keys = None
+        if self.args.get:
+            keys = map(nilmdb.utils.unicode.decode, self.args.get)
         try:
             data = self.client.stream_get_metadata(self.args.path, keys)
         except nilmdb.client.ClientError as e:
@@ -81,4 +85,6 @@ def cmd_metadata(self):
         # Print nonexistant keys as having empty value
         if value is None:
             value = ""
-        printf("%s=%s\n", key, value)
+        printf("%s=%s\n",
+               nilmdb.utils.unicode.encode(key),
+               nilmdb.utils.unicode.encode(value))

View File

@@ -19,8 +19,8 @@ from . import rocket
 # Up to 256 open file descriptors at any given time.
 # These variables are global so they can be used in the decorator arguments.
-table_cache_size = 16
-fd_cache_size = 16
+table_cache_size = 32
+fd_cache_size = 8

 @nilmdb.utils.must_close(wrap_verify = False)
 class BulkData(object):

View File

@@ -176,7 +176,7 @@ class NilmDB(object):
             raise NilmDBError("start must precede end")
         return (start, end)

-    @nilmdb.utils.lru_cache(size = 16)
+    @nilmdb.utils.lru_cache(size = 64)
     def _get_intervals(self, stream_id):
         """
         Return a mutable IntervalSet corresponding to the given stream ID.

View File

@@ -5,6 +5,9 @@
 #include <ctype.h>
 #include <stdint.h>

+#define __STDC_FORMAT_MACROS
+#include <inttypes.h>
+
 /* Values missing from stdint.h */
 #define UINT8_MIN 0
 #define UINT16_MIN 0
@@ -19,13 +22,6 @@
 typedef int64_t timestamp_t;

-/* This code probably needs to be double-checked for the case where
-   sizeof(long) != 8, so enforce that here with something that will
-   fail at build time.  We assume that the python integer type can
-   hold an int64_t. */
-const static char __long_ok[1 - 2*!(sizeof(int64_t) ==
-                                    sizeof(long int))] = { 0 };
-
 /* Somewhat arbitrary, just so we can use fixed sizes for strings
    etc. */
 static const int MAX_LAYOUT_COUNT = 1024;
@@ -58,7 +54,7 @@ static PyObject *raise_str(int line, int col, int code, const char *string)
 static PyObject *raise_int(int line, int col, int code, int64_t num)
 {
         PyObject *o;
-        o = Py_BuildValue("(iiil)", line, col, code, num);
+        o = Py_BuildValue("(iiiL)", line, col, code, (long long)num);
         if (o != NULL) {
                 PyErr_SetObject(ParseError, o);
                 Py_DECREF(o);
@@ -249,11 +245,11 @@ static PyObject *Rocket_get_file_size(Rocket *self)
 /****
  * Append from string
  */
-static inline long int strtol10(const char *nptr, char **endptr) {
-        return strtol(nptr, endptr, 10);
+static inline long int strtoll10(const char *nptr, char **endptr) {
+        return strtoll(nptr, endptr, 10);
 }
-static inline long int strtoul10(const char *nptr, char **endptr) {
-        return strtoul(nptr, endptr, 10);
+static inline long int strtoull10(const char *nptr, char **endptr) {
+        return strtoull(nptr, endptr, 10);
 }

 /* .append_string(count, data, offset, linenum, start, end, last_timestamp) */
@@ -264,6 +260,7 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
         int offset;
         const char *linestart;
         int linenum;
+        long long ll1, ll2, ll3;
         timestamp_t start;
         timestamp_t end;
         timestamp_t last_timestamp;
@@ -280,10 +277,13 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
            but we need the null termination for strto*.  If we had
            strnto* that took a length, we could use t# and not require
            a copy. */
-        if (!PyArg_ParseTuple(args, "isiilll:append_string", &count,
+        if (!PyArg_ParseTuple(args, "isiiLLL:append_string", &count,
                               &data, &offset, &linenum,
-                              &start, &end, &last_timestamp))
+                              &ll1, &ll2, &ll3))
                 return NULL;
+        start = ll1;
+        end = ll2;
+        last_timestamp = ll3;

         /* Skip spaces, but don't skip over a newline. */
 #define SKIP_BLANK(buf) do {                    \
@@ -372,14 +372,14 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
                         goto extra_data_on_line;        \
                 break
-                CS(INT8, strtol10, t64.i, t8.i, t8.u, , 1);
-                CS(UINT8, strtoul10, t64.u, t8.u, t8.u, , 1);
-                CS(INT16, strtol10, t64.i, t16.i, t16.u, le16toh, 2);
-                CS(UINT16, strtoul10, t64.u, t16.u, t16.u, le16toh, 2);
-                CS(INT32, strtol10, t64.i, t32.i, t32.u, le32toh, 4);
-                CS(UINT32, strtoul10, t64.u, t32.u, t32.u, le32toh, 4);
-                CS(INT64, strtol10, t64.i, t64.i, t64.u, le64toh, 8);
-                CS(UINT64, strtoul10, t64.u, t64.u, t64.u, le64toh, 8);
+                CS(INT8, strtoll10, t64.i, t8.i, t8.u, , 1);
+                CS(UINT8, strtoull10, t64.u, t8.u, t8.u, , 1);
+                CS(INT16, strtoll10, t64.i, t16.i, t16.u, le16toh, 2);
+                CS(UINT16, strtoull10, t64.u, t16.u, t16.u, le16toh, 2);
+                CS(INT32, strtoll10, t64.i, t32.i, t32.u, le32toh, 4);
+                CS(UINT32, strtoull10, t64.u, t32.u, t32.u, le32toh, 4);
+                CS(INT64, strtoll10, t64.i, t64.i, t64.u, le64toh, 8);
+                CS(UINT64, strtoull10, t64.u, t64.u, t64.u, le64toh, 8);
                 CS(FLOAT32, strtod, t64.d, t32.f, t32.u, le32toh, 4);
                 CS(FLOAT64, strtod, t64.d, t64.d, t64.u, le64toh, 8);
 #undef CS
@@ -397,7 +397,8 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
         /* Build return value and return */
         offset = buf - data;
         PyObject *o;
-        o = Py_BuildValue("(iili)", written, offset, last_timestamp, linenum);
+        o = Py_BuildValue("(iiLi)", written, offset,
+                          (long long)last_timestamp, linenum);
         return o;
 err:
         PyErr_SetFromErrno(PyExc_OSError);
@@ -431,14 +432,18 @@ static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
         int data_len;
         int linenum;
         int offset;
+        long long ll1, ll2, ll3;
         timestamp_t start;
         timestamp_t end;
         timestamp_t last_timestamp;

-        if (!PyArg_ParseTuple(args, "it#iilll:append_binary",
+        if (!PyArg_ParseTuple(args, "it#iiLLL:append_binary",
                               &count, &data, &data_len, &offset,
-                              &linenum, &start, &end, &last_timestamp))
+                              &linenum, &ll1, &ll2, &ll3))
                 return NULL;
+        start = ll1;
+        end = ll2;
+        last_timestamp = ll3;

         /* Advance to offset */
         if (offset > data_len)
@@ -476,8 +481,8 @@ static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
         /* Build return value and return */
         PyObject *o;
-        o = Py_BuildValue("(iili)", rows, offset + rows * self->binary_size,
-                          last_timestamp, linenum);
+        o = Py_BuildValue("(iiLi)", rows, offset + rows * self->binary_size,
+                          (long long)last_timestamp, linenum);
         return o;
 }
@@ -534,7 +539,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
                         if (fread(&t64.u, 8, 1, self->file) != 1)
                                 goto err;
                         t64.u = le64toh(t64.u);
-                        ret = sprintf(&str[len], "%ld", t64.i);
+                        ret = sprintf(&str[len], "%" PRId64, t64.i);
                         if (ret <= 0)
                                 goto err;
                         len += ret;
@@ -556,14 +561,14 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
                                 len += ret;             \
                         }                               \
                         break
-                CASE(INT8, "%hhd", t8.i, t8.u, , 1);
-                CASE(UINT8, "%hhu", t8.u, t8.u, , 1);
-                CASE(INT16, "%hd", t16.i, t16.u, le16toh, 2);
-                CASE(UINT16, "%hu", t16.u, t16.u, le16toh, 2);
-                CASE(INT32, "%d", t32.i, t32.u, le32toh, 4);
-                CASE(UINT32, "%u", t32.u, t32.u, le32toh, 4);
-                CASE(INT64, "%ld", t64.i, t64.u, le64toh, 8);
-                CASE(UINT64, "%lu", t64.u, t64.u, le64toh, 8);
+                CASE(INT8, "%" PRId8, t8.i, t8.u, , 1);
+                CASE(UINT8, "%" PRIu8, t8.u, t8.u, , 1);
+                CASE(INT16, "%" PRId16, t16.i, t16.u, le16toh, 2);
+                CASE(UINT16, "%" PRIu16, t16.u, t16.u, le16toh, 2);
+                CASE(INT32, "%" PRId32, t32.i, t32.u, le32toh, 4);
+                CASE(UINT32, "%" PRIu32, t32.u, t32.u, le32toh, 4);
+                CASE(INT64, "%" PRId64, t64.i, t64.u, le64toh, 8);
+                CASE(UINT64, "%" PRIu64, t64.u, t64.u, le64toh, 8);
                 /* These next two are a bit debatable.  floats
                    are 6-9 significant figures, so we print 7.
                    Doubles are 15-19, so we print 17.  This is
@@ -653,7 +658,7 @@ static PyObject *Rocket_extract_timestamp(Rocket *self, PyObject *args)
         /* Convert and return */
         t64.u = le64toh(t64.u);
-        return Py_BuildValue("l", t64.i);
+        return Py_BuildValue("L", (long long)t64.i);
 }

 /****
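
The point of switching the Py_BuildValue/PyArg_ParseTuple codes from "l" to "L" (and strtol/%ld to strtoll/PRId64) is that a C long is only 32 bits on 32-bit architectures, while NilmDB timestamps are 64-bit microsecond values. A quick sanity check in Python against the timestamps used by the new test data:

    INT32_MAX = 2**31 - 1     # largest value a 32-bit signed C long can hold
    for ts in (-10000000000, -100000000, 1000000000, 2**63 - 1):
        print("%d fits in a 32-bit long: %s" % (ts, -2**31 <= ts <= INT32_MAX))
    # The first and last values do not fit, so they need the long-long code paths.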

View File

@@ -17,126 +17,26 @@ import decorator
 import psutil
 import traceback

+from nilmdb.server.serverutil import (
+    chunked_response,
+    response_type,
+    workaround_cp_bug_1200,
+    exception_to_httperror,
+    CORS_allow,
+    json_to_request_params,
+    json_error_page,
+    cherrypy_start,
+    cherrypy_stop,
+    bool_param,
+    )
+
+# Add CORS_allow tool
+cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
+
 class NilmApp(object):
     def __init__(self, db):
         self.db = db

-# Decorators
-def chunked_response(func):
-    """Decorator to enable chunked responses."""
-    # Set this to False to get better tracebacks from some requests
-    # (/stream/extract, /stream/intervals).
-    func._cp_config = { 'response.stream': True }
-    return func
-
-def response_type(content_type):
-    """Return a decorator-generating function that sets the
-    response type to the specified string."""
-    def wrapper(func, *args, **kwargs):
-        cherrypy.response.headers['Content-Type'] = content_type
-        return func(*args, **kwargs)
-    return decorator.decorator(wrapper)
-
-@decorator.decorator
-def workaround_cp_bug_1200(func, *args, **kwargs): # pragma: no cover
-    """Decorator to work around CherryPy bug #1200 in a response
-    generator.
-
-    Even if chunked responses are disabled, LookupError or
-    UnicodeError exceptions may still be swallowed by CherryPy due to
-    bug #1200. This throws them as generic Exceptions instead so that
-    they make it through.
-    """
-    exc_info = None
-    try:
-        for val in func(*args, **kwargs):
-            yield val
-    except (LookupError, UnicodeError):
-        # Re-raise it, but maintain the original traceback
-        exc_info = sys.exc_info()
-        new_exc = Exception(exc_info[0].__name__ + ": " + str(exc_info[1]))
-        raise new_exc, None, exc_info[2]
-    finally:
-        del exc_info
-
-def exception_to_httperror(*expected):
-    """Return a decorator-generating function that catches expected
-    errors and throws a HTTPError describing it instead.
-
-        @exception_to_httperror(NilmDBError, ValueError)
-        def foo():
-            pass
-    """
-    def wrapper(func, *args, **kwargs):
-        exc_info = None
-        try:
-            return func(*args, **kwargs)
-        except expected:
-            # Re-raise it, but maintain the original traceback
-            exc_info = sys.exc_info()
-            new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
-            raise new_exc, None, exc_info[2]
-        finally:
-            del exc_info
-    # We need to preserve the function's argspecs for CherryPy to
-    # handle argument errors correctly. Decorator.decorator takes
-    # care of that.
-    return decorator.decorator(wrapper)
-
-# Custom CherryPy tools
-def CORS_allow(methods):
-    """This does several things:
-
-    Handles CORS preflight requests.
-    Adds Allow: header to all requests.
-    Raise 405 if request.method not in method.
-
-    It is similar to cherrypy.tools.allow, with the CORS stuff added.
-    """
-    request = cherrypy.request.headers
-    response = cherrypy.response.headers
-
-    if not isinstance(methods, (tuple, list)): # pragma: no cover
-        methods = [ methods ]
-    methods = [ m.upper() for m in methods if m ]
-    if not methods: # pragma: no cover
-        methods = [ 'GET', 'HEAD' ]
-    elif 'GET' in methods and 'HEAD' not in methods: # pragma: no cover
-        methods.append('HEAD')
-    response['Allow'] = ', '.join(methods)
-
-    # Allow all origins
-    if 'Origin' in request:
-        response['Access-Control-Allow-Origin'] = request['Origin']
-
-    # If it's a CORS request, send response.
-    request_method = request.get("Access-Control-Request-Method", None)
-    request_headers = request.get("Access-Control-Request-Headers", None)
-    if (cherrypy.request.method == "OPTIONS" and
-        request_method and request_headers):
-        response['Access-Control-Allow-Headers'] = request_headers
-        response['Access-Control-Allow-Methods'] = ', '.join(methods)
-        # Try to stop further processing and return a 200 OK
-        cherrypy.response.status = "200 OK"
-        cherrypy.response.body = ""
-        cherrypy.request.handler = lambda: ""
-        return
-
-    # Reject methods that were not explicitly allowed
-    if cherrypy.request.method not in methods:
-        raise cherrypy.HTTPError(405)
-
-cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
-
-# Helper for json_in tool to process JSON data into normal request
-# parameters.
-def json_to_request_params(body):
-    cherrypy.lib.jsontools.json_processor(body)
-    if not isinstance(cherrypy.request.json, dict):
-        raise cherrypy.HTTPError(415)
-    cherrypy.request.params.update(cherrypy.request.json)
-
 # CherryPy apps
 class Root(NilmApp):
     """Root application for NILM database"""
@@ -147,7 +47,10 @@ class Root(NilmApp):
     # /
     @cherrypy.expose
     def index(self):
-        raise cherrypy.NotFound()
+        cherrypy.response.headers['Content-Type'] = 'text/plain'
+        msg = sprintf("This is NilmDB version %s, running on host %s.\n",
+                      nilmdb.__version__, socket.getfqdn())
+        return msg

     # /favicon.ico
     @cherrypy.expose
@@ -167,9 +70,13 @@ class Root(NilmApp):
         """Return a dictionary with the database path,
         size of the database in bytes, and free disk space in bytes"""
         path = self.db.get_basepath()
+        usage = psutil.disk_usage(path)
+        dbsize = nilmdb.utils.du(path)
         return { "path": path,
-                 "size": nilmdb.utils.du(path),
-                 "free": psutil.disk_usage(path).free }
+                 "size": dbsize,
+                 "other": usage.used - dbsize,
+                 "reserved": usage.total - usage.used - usage.free,
+                 "free": usage.free }

 class Stream(NilmApp):
     """Stream-specific operations"""
@@ -199,10 +106,10 @@ class Stream(NilmApp):
         layout parameter, just list streams that match the given path
         or layout.

-        If extent is not given, returns a list of lists containing
-        the path and layout: [ path, layout ]
+        If extended is missing or zero, returns a list of lists
+        containing the path and layout: [ path, layout ]

-        If extended is provided, returns a list of lists containing
+        If extended is true, returns a list of lists containing
         extended info: [ path, layout, extent_min, extent_max,
         total_rows, total_seconds ].  More data may be added.
         """
@@ -315,6 +222,8 @@ class Stream(NilmApp):
         little-endian and matches the database types (including an
         int64 timestamp).
         """
+        binary = bool_param(binary)
+
         # Important that we always read the input before throwing any
         # errors, to keep lengths happy for persistent connections.
         # Note that CherryPy 3.2.2 has a bug where this fails for GET
@@ -439,6 +348,10 @@ class Stream(NilmApp):
         little-endian and matches the database types (including an
         int64 timestamp).
         """
+        binary = bool_param(binary)
+        markup = bool_param(markup)
+        count = bool_param(count)
+
         (start, end) = self._get_times(start, end)

         # Check path and get layout
@@ -566,70 +479,14 @@ class Server(object):
     def json_error_page(self, status, message, traceback, version):
         """Return a custom error page in JSON so the client can parse it"""
-        errordata = { "status" : status,
-                      "message" : message,
-                      "traceback" : traceback }
-        # Don't send a traceback if the error was 400-499 (client's fault)
-        try:
-            code = int(status.split()[0])
-            if not self.force_traceback:
-                if code >= 400 and code <= 499:
-                    errordata["traceback"] = ""
-        except Exception: # pragma: no cover
-            pass
-        # Override the response type, which was previously set to text/html
-        cherrypy.serving.response.headers['Content-Type'] = (
-            "application/json;charset=utf-8" )
-        # Undo the HTML escaping that cherrypy's get_error_page function applies
-        # (cherrypy issue 1135)
-        for k, v in errordata.iteritems():
-            v = v.replace("&lt;","<")
-            v = v.replace("&gt;",">")
-            v = v.replace("&amp;","&")
-            errordata[k] = v
-        return json.dumps(errordata, separators=(',',':'))
+        return json_error_page(status, message, traceback, version,
+                               self.force_traceback)

     def start(self, blocking = False, event = None):
-        if not self.embedded: # pragma: no cover
-            # Handle signals nicely
-            if hasattr(cherrypy.engine, "signal_handler"):
-                cherrypy.engine.signal_handler.subscribe()
-            if hasattr(cherrypy.engine, "console_control_handler"):
-                cherrypy.engine.console_control_handler.subscribe()
-
-        # Cherrypy stupidly calls os._exit(70) when it can't bind the
-        # port. At least try to print a reasonable error and continue
-        # in this case, rather than just dying silently (as we would
-        # otherwise do in embedded mode)
-        real_exit = os._exit
-        def fake_exit(code): # pragma: no cover
-            if code == os.EX_SOFTWARE:
-                fprintf(sys.stderr, "error: CherryPy called os._exit!\n")
-            else:
-                real_exit(code)
-        os._exit = fake_exit
-        cherrypy.engine.start()
-        os._exit = real_exit
-
-        # Signal that the engine has started successfully
-        if event is not None:
-            event.set()
-
-        if blocking:
-            try:
-                cherrypy.engine.wait(cherrypy.engine.states.EXITING,
-                                     interval = 0.1, channel = 'main')
-            except (KeyboardInterrupt, IOError): # pragma: no cover
-                cherrypy.engine.log('Keyboard Interrupt: shutting down bus')
-                cherrypy.engine.exit()
-            except SystemExit: # pragma: no cover
-                cherrypy.engine.log('SystemExit raised: shutting down bus')
-                cherrypy.engine.exit()
-                raise
+        cherrypy_start(blocking, event, self.embedded)

     def stop(self):
-        cherrypy.engine.exit()
+        cherrypy_stop()

 # Use a single global nilmdb.server.NilmDB and nilmdb.server.Server
 # instance since the database can only be opened once.  For this to

nilmdb/server/serverutil.py (new file, +214 lines)
View File

@@ -0,0 +1,214 @@
"""Miscellaneous decorators and other helpers for running a CherryPy
server"""

import cherrypy
import sys
import os
import decorator
import simplejson as json

# Helper to parse parameters into booleans
def bool_param(s):
    """Return a bool indicating whether parameter 's' was True or False,
    supporting a few different types for 's'."""
    try:
        ss = s.lower()
        if ss in [ "0", "false", "f", "no", "n" ]:
            return False
        if ss in [ "1", "true", "t", "yes", "y" ]:
            return True
    except Exception:
        return bool(s)
    raise cherrypy.HTTPError("400 Bad Request",
                             "can't parse parameter: " + ss)

# Decorators
def chunked_response(func):
    """Decorator to enable chunked responses."""
    # Set this to False to get better tracebacks from some requests
    # (/stream/extract, /stream/intervals).
    func._cp_config = { 'response.stream': True }
    return func

def response_type(content_type):
    """Return a decorator-generating function that sets the
    response type to the specified string."""
    def wrapper(func, *args, **kwargs):
        cherrypy.response.headers['Content-Type'] = content_type
        return func(*args, **kwargs)
    return decorator.decorator(wrapper)

@decorator.decorator
def workaround_cp_bug_1200(func, *args, **kwargs): # pragma: no cover
    """Decorator to work around CherryPy bug #1200 in a response
    generator.

    Even if chunked responses are disabled, LookupError or
    UnicodeError exceptions may still be swallowed by CherryPy due to
    bug #1200. This throws them as generic Exceptions instead so that
    they make it through.
    """
    exc_info = None
    try:
        for val in func(*args, **kwargs):
            yield val
    except (LookupError, UnicodeError):
        # Re-raise it, but maintain the original traceback
        exc_info = sys.exc_info()
        new_exc = Exception(exc_info[0].__name__ + ": " + str(exc_info[1]))
        raise new_exc, None, exc_info[2]
    finally:
        del exc_info

def exception_to_httperror(*expected):
    """Return a decorator-generating function that catches expected
    errors and throws a HTTPError describing it instead.

        @exception_to_httperror(NilmDBError, ValueError)
        def foo():
            pass
    """
    def wrapper(func, *args, **kwargs):
        exc_info = None
        try:
            return func(*args, **kwargs)
        except expected:
            # Re-raise it, but maintain the original traceback
            exc_info = sys.exc_info()
            new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
            raise new_exc, None, exc_info[2]
        finally:
            del exc_info
    # We need to preserve the function's argspecs for CherryPy to
    # handle argument errors correctly. Decorator.decorator takes
    # care of that.
    return decorator.decorator(wrapper)

# Custom CherryPy tools
def CORS_allow(methods):
    """This does several things:

    Handles CORS preflight requests.
    Adds Allow: header to all requests.
    Raise 405 if request.method not in method.

    It is similar to cherrypy.tools.allow, with the CORS stuff added.

    Add this to CherryPy with:
    cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
    """
    request = cherrypy.request.headers
    response = cherrypy.response.headers

    if not isinstance(methods, (tuple, list)): # pragma: no cover
        methods = [ methods ]
    methods = [ m.upper() for m in methods if m ]
    if not methods: # pragma: no cover
        methods = [ 'GET', 'HEAD' ]
    elif 'GET' in methods and 'HEAD' not in methods: # pragma: no cover
        methods.append('HEAD')
    response['Allow'] = ', '.join(methods)

    # Allow all origins
    if 'Origin' in request:
        response['Access-Control-Allow-Origin'] = request['Origin']

    # If it's a CORS request, send response.
    request_method = request.get("Access-Control-Request-Method", None)
    request_headers = request.get("Access-Control-Request-Headers", None)
    if (cherrypy.request.method == "OPTIONS" and
        request_method and request_headers):
        response['Access-Control-Allow-Headers'] = request_headers
        response['Access-Control-Allow-Methods'] = ', '.join(methods)
        # Try to stop further processing and return a 200 OK
        cherrypy.response.status = "200 OK"
        cherrypy.response.body = ""
        cherrypy.request.handler = lambda: ""
        return

    # Reject methods that were not explicitly allowed
    if cherrypy.request.method not in methods:
        raise cherrypy.HTTPError(405)

# Helper for json_in tool to process JSON data into normal request
# parameters.
def json_to_request_params(body):
    cherrypy.lib.jsontools.json_processor(body)
    if not isinstance(cherrypy.request.json, dict):
        raise cherrypy.HTTPError(415)
    cherrypy.request.params.update(cherrypy.request.json)

# Used as an "error_page.default" handler
def json_error_page(status, message, traceback, version,
                    force_traceback = False):
    """Return a custom error page in JSON so the client can parse it"""
    errordata = { "status" : status,
                  "message" : message,
                  "traceback" : traceback }
    # Don't send a traceback if the error was 400-499 (client's fault)
    try:
        code = int(status.split()[0])
        if not force_traceback:
            if code >= 400 and code <= 499:
                errordata["traceback"] = ""
    except Exception: # pragma: no cover
        pass
    # Override the response type, which was previously set to text/html
    cherrypy.serving.response.headers['Content-Type'] = (
        "application/json;charset=utf-8" )
    # Undo the HTML escaping that cherrypy's get_error_page function applies
    # (cherrypy issue 1135)
    for k, v in errordata.iteritems():
        v = v.replace("&lt;","<")
        v = v.replace("&gt;",">")
        v = v.replace("&amp;","&")
        errordata[k] = v
    return json.dumps(errordata, separators=(',',':'))

# Start/stop CherryPy standalone server
def cherrypy_start(blocking = False, event = False, embedded = False):
    """Start the CherryPy server, handling errors and signals
    somewhat gracefully."""

    if not embedded: # pragma: no cover
        # Handle signals nicely
        if hasattr(cherrypy.engine, "signal_handler"):
            cherrypy.engine.signal_handler.subscribe()
        if hasattr(cherrypy.engine, "console_control_handler"):
            cherrypy.engine.console_control_handler.subscribe()

    # Cherrypy stupidly calls os._exit(70) when it can't bind the
    # port. At least try to print a reasonable error and continue
    # in this case, rather than just dying silently (as we would
    # otherwise do in embedded mode)
    real_exit = os._exit
    def fake_exit(code): # pragma: no cover
        if code == os.EX_SOFTWARE:
            fprintf(sys.stderr, "error: CherryPy called os._exit!\n")
        else:
            real_exit(code)
    os._exit = fake_exit
    cherrypy.engine.start()
    os._exit = real_exit

    # Signal that the engine has started successfully
    if event is not None:
        event.set()

    if blocking:
        try:
            cherrypy.engine.wait(cherrypy.engine.states.EXITING,
                                 interval = 0.1, channel = 'main')
        except (KeyboardInterrupt, IOError): # pragma: no cover
            cherrypy.engine.log('Keyboard Interrupt: shutting down bus')
            cherrypy.engine.exit()
        except SystemExit: # pragma: no cover
            cherrypy.engine.log('SystemExit raised: shutting down bus')
            cherrypy.engine.exit()
            raise

# Stop CherryPy server
def cherrypy_stop():
    cherrypy.engine.exit()
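
bool_param() appears to be the heart of the "Improve boolean HTTP parameter handling" commit above: the server now accepts the usual spellings for boolean query parameters and rejects anything else with a 400. A quick sketch of its behavior, assuming it is importable as shown in the server diff:

    from nilmdb.server.serverutil import bool_param

    for value in [ "1", "true", "T", "yes", "0", "false", "n" ]:
        print("%r -> %r" % (value, bool_param(value)))
    # Unrecognized values raise cherrypy.HTTPError("400 Bad Request",
    # "can't parse parameter: ..."), e.g. bool_param("asdfasfd").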

View File

@@ -14,3 +14,4 @@ import nilmdb.utils.iterator
 import nilmdb.utils.interval
 import nilmdb.utils.lock
 import nilmdb.utils.sort
+import nilmdb.utils.unicode

View File

@@ -1,5 +1,6 @@
 """Interval.  Like nilmdb.server.interval, but re-implemented here
-in plain Python so clients have easier access to it.
+in plain Python so clients have easier access to it, and with a few
+helper functions.

 Intervals are half-open, ie. they include data points with timestamps
 [start, end)
@@ -34,6 +35,10 @@ class Interval:
         return ("[" + nilmdb.utils.time.timestamp_to_string(self.start) +
                 " -> " + nilmdb.utils.time.timestamp_to_string(self.end) + ")")

+    def human_string(self):
+        return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
+                " -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")
+
     def __cmp__(self, other):
         """Compare two intervals.  If non-equal, order by start then end"""
         return cmp(self.start, other.start) or cmp(self.end, other.end)
@@ -104,3 +109,20 @@ def set_difference(a, b):
                 b_interval = None
         if a_interval:
             out_start = ts
+
+def optimize(it):
+    """
+    Given an iterable 'it' with intervals, optimize them by joining
+    together intervals that are adjacent in time, and return a generator
+    that yields the new intervals.
+    """
+    saved_int = None
+    for interval in it:
+        if saved_int is not None:
+            if saved_int.end == interval.start:
+                interval.start = saved_int.start
+            else:
+                yield saved_int
+        saved_int = interval
+    if saved_int is not None:
+        yield saved_int
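
optimize() only joins intervals that touch exactly (saved_int.end == interval.start); gaps are preserved. A short sketch mirroring the new unit test, assuming Interval and optimize are importable from nilmdb.utils.interval:

    from nilmdb.utils.interval import Interval, optimize

    ivals = [ Interval(1, 2), Interval(2, 3), Interval(4, 5) ]
    for i in optimize(ivals):
        print("%d -> %d" % (i.start, i.end))   # prints "1 -> 3", then "4 -> 5"

    # human_string() formats the endpoints with timestamp_to_human(), e.g.
    # Interval(1234567890123456, 1234567890654321).human_string() gives
    # "[ Fri, 13 Feb 2009 18:31:30.123456 -0500 -> ... ]" in US/Eastern.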

nilmdb/utils/unicode.py (new file, +29 lines)
View File

@@ -0,0 +1,29 @@
import sys

if sys.version_info[0] >= 3: # pragma: no cover (future Python3 compat)
    text_type = str
else:
    text_type = unicode

def encode(u):
    """Try to encode something from Unicode to a string using the
    default encoding.  If it fails, try encoding as UTF-8."""
    if not isinstance(u, text_type):
        return u
    try:
        return u.encode()
    except UnicodeEncodeError:
        return u.encode("utf-8")

def decode(s):
    """Try to decode someting from string to Unicode using the
    default encoding.  If it fails, try decoding as UTF-8."""
    if isinstance(s, text_type):
        return s
    try:
        return s.decode()
    except UnicodeDecodeError:
        try:
            return s.decode("utf-8")
        except UnicodeDecodeError:
            return s # best we can do
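
These helpers exist so metadata keys/values and command-line arguments can round-trip through Python 2's str/unicode split, falling back to UTF-8 when the default codec fails. A small sketch (Python 2, which is what this tree targets):

    import nilmdb.utils.unicode

    s = "caf\xc3\xa9"                              # UTF-8 bytes for u"café"
    u = nilmdb.utils.unicode.decode(s)             # ASCII decode fails, falls back to UTF-8
    assert isinstance(u, unicode)
    assert nilmdb.utils.unicode.encode(u) == s     # encodes back to the same UTF-8 bytes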

tests/data/timestamped (new file, +8 lines)
View File

@@ -0,0 +1,8 @@
-10000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
-100000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
-100000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
-1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03

View File

@@ -354,10 +354,6 @@ class TestClient(object):
         with assert_raises(ServerError) as e:
             client.http.get_gen("http://nosuchurl.example.com./").next()

-        # Trigger a curl error in generator
-        with assert_raises(ServerError) as e:
-            client.http.get_gen("http://nosuchurl.example.com./").next()
-
         # Check 404 for missing streams
         for function in [ client.stream_intervals, client.stream_extract ]:
             with assert_raises(ClientError) as e:
@@ -396,27 +392,38 @@ class TestClient(object):
                                  headers())

         # Extract
-        x = http.get("stream/extract",
-                     { "path": "/newton/prep",
-                       "start": "123",
-                       "end": "124" })
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124" })
         if "transfer-encoding: chunked" not in headers():
             warnings.warn("Non-chunked HTTP response for /stream/extract")
         if "content-type: text/plain;charset=utf-8" not in headers():
             raise AssertionError("/stream/extract is not text/plain:\n" +
                                  headers())

-        x = http.get("stream/extract",
-                     { "path": "/newton/prep",
-                       "start": "123",
-                       "end": "124",
-                       "binary": "1" })
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124",
+                                         "binary": "1" })
         if "transfer-encoding: chunked" not in headers():
             warnings.warn("Non-chunked HTTP response for /stream/extract")
         if "content-type: application/octet-stream" not in headers():
             raise AssertionError("/stream/extract is not binary:\n" +
                                  headers())

+        # Make sure a binary of "0" is really off
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124",
+                                         "binary": "0" })
+        if "content-type: application/octet-stream" in headers():
+            raise AssertionError("/stream/extract is not text:\n" +
+                                 headers())
+
+        # Invalid parameters
+        with assert_raises(ClientError) as e:
+            x = http.get("stream/extract", { "path": "/newton/prep",
+                                             "start": "123", "end": "124",
+                                             "binary": "asdfasfd" })
+        in_("can't parse parameter", str(e.exception))
+
         client.close()
def test_client_08_unicode(self): def test_client_08_unicode(self):

View File

@@ -88,7 +88,7 @@ class TestCmdline(object):
             sys.exit(0)
         except SystemExit as e:
             exitcode = e.code
-        captured = outfile.getvalue()
+        captured = nilmdb.utils.unicode.decode(outfile.getvalue())
         self.captured = captured
         self.exitcode = exitcode
@@ -245,8 +245,10 @@ class TestCmdline(object):
         self.contain("Client version: " + nilmdb.__version__)
         self.contain("Server version: " + test_server.version)
         self.contain("Server database path")
-        self.contain("Server database size")
-        self.contain("Server database free space")
+        self.contain("Server disk space used by NilmDB")
+        self.contain("Server disk space used by other")
+        self.contain("Server disk space reserved")
+        self.contain("Server disk space free")

     def test_04_createlist(self):
         # Basic stream tests, like those in test_client.
@@ -473,6 +475,13 @@ class TestCmdline(object):
         # bad start time
         self.fail("insert -t -r 120 --start 'whatever' /newton/prep /dev/null")

+        # Test negative times
+        self.ok("insert --start @-10000000000 --end @1000000001 /newton/prep"
+                " tests/data/timestamped")
+        self.ok("extract -c /newton/prep --start min --end @1000000001")
+        self.match("8\n")
+        self.ok("remove /newton/prep --start min --end @1000000001")
+
     def test_07_detail_extended(self):
         # Just count the number of lines, it's probably fine
         self.ok("list --detail")

View File

@@ -59,6 +59,14 @@ class TestInterval:
             self.test_interval_intersect()
         Interval = NilmdbInterval

+        # Other helpers in nilmdb.utils.interval
+        i = [ UtilsInterval(1,2), UtilsInterval(2,3), UtilsInterval(4,5) ]
+        eq_(list(nilmdb.utils.interval.optimize(i)),
+            [ UtilsInterval(1,3), UtilsInterval(4,5) ])
+        eq_(UtilsInterval(1234567890123456, 1234567890654321).human_string(),
+            "[ Fri, 13 Feb 2009 18:31:30.123456 -0500 -> " +
+            "Fri, 13 Feb 2009 18:31:30.654321 -0500 ]")
+
     def test_interval(self):
         # Test Interval class
         os.environ['TZ'] = "America/New_York"

View File

@@ -157,11 +157,14 @@ class TestServer(object):
     def test_server(self):
         # Make sure we can't force an exit, and test other 404 errors
-        for url in [ "/exit", "/", "/favicon.ico" ]:
+        for url in [ "/exit", "/favicon.ico" ]:
             with assert_raises(HTTPError) as e:
                 geturl(url)
             eq_(e.exception.code, 404)

+        # Root page
+        in_("This is NilmDB", geturl("/"))
+
         # Check version
         eq_(distutils.version.LooseVersion(getjson("/version")),
             distutils.version.LooseVersion(nilmdb.__version__))