|
|
@@ -195,6 +195,8 @@ class Stream(NilmApp): |
|
|
|
raise cherrypy.HTTPError("400 Bad Request", |
|
|
|
"end before start") |
|
|
|
def content(start, end): |
|
|
|
# Note: disable response.stream below to get better debug info |
|
|
|
# from tracebacks in this subfunction. |
|
|
|
while True: |
|
|
|
(intervals, restart) = self.db.stream_intervals(path,start,end) |
|
|
|
response = ''.join([ json.dumps(i) + "\n" for i in intervals ]) |
|
|
@@ -207,16 +209,37 @@ class Stream(NilmApp): |
|
|
|
|
|
|
|
# /stream/extract?path=/newton/prep&start=1234567890.0&end=1234567899.0
@cherrypy.expose
@cherrypy.tools.json_out()
def extract(self, path, start, end):
    """
    Extract interval data for `path` from the backend database.

    `start` and `end` arrive as HTTP query-parameter strings (or
    None); they are converted to floats before being passed to the
    backend.

    Returns a (intervals, truncated) tuple, serialized to JSON by
    the `json_out` tool.  If `truncated` is true, there is more
    data available after the last interval given, and the client
    should issue another request starting there.

    Raises cherrypy.HTTPError (400) if both bounds are given and
    `end` precedes `start`.
    """
    # Query parameters are strings; normalize the optional time
    # bounds to floats for the backend.
    if start is not None:
        start = float(start)
    if end is not None:
        end = float(end)

    # Reject an inverted interval before touching the database.
    # (This check previously sat after the return statement below
    # and was unreachable.)
    if start is not None and end is not None:
        if end < start:
            raise cherrypy.HTTPError("400 Bad Request",
                                     "end before start")

    # NOTE(review): an unreachable, half-finished streaming
    # implementation (a `content` generator yielding placeholder
    # text) used to follow the return below; it was dead code and
    # has been removed.  The streaming row-extraction path should
    # be reintroduced via self.db.stream_extract once the row
    # formatting is implemented.
    (intervals, truncated) = self.db.stream_intervals(path, start, end)
    return (intervals, truncated)

# Streaming is disabled for this handler; the full JSON response
# is buffered, which also gives better tracebacks while debugging.
extract._cp_config = { 'response.stream': False }
|
|
|
|
|
|
|
|
|
|
|
class Exiter(object): |
|
|
|
"""App that exits the server, for testing""" |
|
|
|