import nilmdb

from nose.tools import *
from nose.tools import assert_raises
import distutils.version
import simplejson as json
import itertools
import os
import shutil
import sys
import cherrypy
import threading
import urllib2
from urllib2 import urlopen, HTTPError
import Queue
import cStringIO
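
# "tests/testdb" is the on-disk database directory shared by the tests in
# this file; the first test below removes it so it can be recreated fresh.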
testdb = "tests/testdb"

#@atexit.register
#def cleanup():
#    os.unlink(testdb)
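
# test_helpers is assumed to provide the in_() and recursive_unlink()
# helpers used below; eq_ and assert_raises come from nose.tools.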
from test_helpers import *

class Test00Nilmdb(object):  # named 00 so it runs first

    def test_NilmDB(self):
        recursive_unlink(testdb)

        with assert_raises(IOError):
            nilmdb.NilmDB("/nonexistant-db/foo")

        db = nilmdb.NilmDB(testdb)
        db.close()
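
        # sync=False presumably skips synchronous flushing so the tests run
        # faster; the same flag is used for every NilmDB opened below.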
        db = nilmdb.NilmDB(testdb, sync=False)
        db.close()

        # test timer, just to get coverage
        capture = cStringIO.StringIO()
        old = sys.stdout
        sys.stdout = capture
        with nilmdb.Timer("test"):
            nilmdb.timer.time.sleep(0.01)
        sys.stdout = old
        in_("test: ", capture.getvalue())

    def test_stream(self):
        db = nilmdb.NilmDB(testdb, sync=False)
        eq_(db.stream_list(), [])

        # Bad path
        with assert_raises(ValueError):
            db.stream_create("foo/bar/baz", "PrepData")
        with assert_raises(ValueError):
            db.stream_create("/foo", "PrepData")
        # Bad layout type
        with assert_raises(ValueError):
            db.stream_create("/newton/prep", "NoSuchLayout")
        db.stream_create("/newton/prep", "PrepData")
        db.stream_create("/newton/raw", "RawData")
        db.stream_create("/newton/zzz/rawnotch", "RawNotchedData")

        # Verify we got 3 streams
        eq_(db.stream_list(), [ ["/newton/prep", "PrepData"],
                                ["/newton/raw", "RawData"],
                                ["/newton/zzz/rawnotch", "RawNotchedData"]
                              ])
        # Match just one type or one path
        eq_(db.stream_list(layout="RawData"), [ ["/newton/raw", "RawData"] ])
        eq_(db.stream_list(path="/newton/raw"), [ ["/newton/raw", "RawData"] ])

        # Verify that columns were made right
        eq_(len(db.h5file.getNode("/newton/prep").cols), 9)
        eq_(len(db.h5file.getNode("/newton/raw").cols), 7)
        eq_(len(db.h5file.getNode("/newton/zzz/rawnotch").cols), 10)
        assert(not db.h5file.getNode("/newton/prep").colindexed["timestamp"])
        assert(not db.h5file.getNode("/newton/prep").colindexed["c1"])

        # Set / get metadata
        eq_(db.stream_get_metadata("/newton/prep"), {})
        eq_(db.stream_get_metadata("/newton/raw"), {})
        meta1 = { "description": "The Data",
                  "v_scale": "1.234" }
        meta2 = { "description": "The Data" }
        meta3 = { "v_scale": "1.234" }
        db.stream_set_metadata("/newton/prep", meta1)
        db.stream_update_metadata("/newton/prep", {})
        db.stream_update_metadata("/newton/raw", meta2)
        db.stream_update_metadata("/newton/raw", meta3)
        eq_(db.stream_get_metadata("/newton/prep"), meta1)
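        # /newton/raw was built up incrementally (meta2, then meta3), so it
        # should now hold exactly the same keys and values as meta1.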
        eq_(db.stream_get_metadata("/newton/raw"), meta1)

        db.close()

class TestBlockingServer(object):
    def setUp(self):
        self.db = nilmdb.NilmDB(testdb, sync=False)

    def tearDown(self):
        self.db.close()

    def test_blocking_server(self):
        # Start web app on a custom port
        self.server = nilmdb.Server(self.db, host = "127.0.0.1",
                                    port = 12380, stoppable = True)

        # Run it
        event = threading.Event()
        def run_server():
            self.server.start(blocking = True, event = event)
        thread = threading.Thread(target = run_server)
        thread.start()
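        # The server presumably sets `event` once it is ready to accept
        # connections; wait for it (with a timeout) before sending requests.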
        event.wait(timeout = 2)

        # Send request to exit.
        req = urlopen("http://127.0.0.1:12380/exit/", timeout = 1)

        # Wait for it
        thread.join()
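
# Helpers used by the remaining tests: fetch a URL from the test server,
# optionally decoding the body as JSON.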
def geturl(path):
    req = urlopen("http://127.0.0.1:12380" + path, timeout = 10)
    return req.read()

def getjson(path):
    return json.loads(geturl(path))

class TestServer(object):

    def setUp(self):
        # Start web app on a custom port
        self.db = nilmdb.NilmDB(testdb, sync=False)
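        # stoppable = False presumably disables the /exit handler;
        # test_server below checks that /exit returns a 404.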
        self.server = nilmdb.Server(self.db, host = "127.0.0.1",
                                    port = 12380, stoppable = False)
        self.server.start(blocking = False)

    def tearDown(self):
        # Close web app
        self.server.stop()
        self.db.close()

    def test_server(self):
        # Make sure we can't force an exit, and test other 404 errors
        for url in [ "/exit", "/", "/favicon.ico" ]:
            with assert_raises(HTTPError) as e:
                geturl(url)
            eq_(e.exception.code, 404)

        # Check version
        eq_(distutils.version.StrictVersion(getjson("/version")),
            distutils.version.StrictVersion(self.server.version))

    def test_stream_list(self):
        # Known streams that were populated by the earlier Test00Nilmdb tests
        streams = getjson("/stream/list")

        eq_(streams, [
            ['/newton/prep', 'PrepData'],
            ['/newton/raw', 'RawData'],
            ['/newton/zzz/rawnotch', 'RawNotchedData'],
            ])

        streams = getjson("/stream/list?layout=RawData")
        eq_(streams, [['/newton/raw', 'RawData']])

        streams = getjson("/stream/list?layout=NoSuchLayout")
        eq_(streams, [])

    def test_stream_metadata(self):
        with assert_raises(HTTPError) as e:
            getjson("/stream/get_metadata?path=foo")
        eq_(e.exception.code, 404)

        data = getjson("/stream/get_metadata?path=/newton/prep")
        eq_(data, {'description': 'The Data', 'v_scale': '1.234'})

        data = getjson("/stream/get_metadata?path=/newton/prep"
                       "&key=v_scale")
        eq_(data, {'v_scale': '1.234'})

        data = getjson("/stream/get_metadata?path=/newton/prep"
                       "&key=v_scale&key=description")
        eq_(data, {'description': 'The Data', 'v_scale': '1.234'})
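
        # Keys that were never set are still returned, mapped to None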
        data = getjson("/stream/get_metadata?path=/newton/prep"
                       "&key=v_scale&key=foo")
        eq_(data, {'foo': None, 'v_scale': '1.234'})

        data = getjson("/stream/get_metadata?path=/newton/prep"
                       "&key=foo")
        eq_(data, {'foo': None})

    def test_insert(self):
        # GET instead of POST (no body)
        # (actual POST test is done by client code)
        with assert_raises(HTTPError) as e:
            getjson("/stream/insert?path=/newton/prep")
        eq_(e.exception.code, 400)