2012-01-22 19:40:48 -05:00
|
|
|
import nilmdb
|
|
|
|
|
2012-01-25 19:13:44 -05:00
|
|
|
from nose.tools import *
|
2012-01-22 19:40:48 -05:00
|
|
|
from nose.tools import assert_raises
|
2012-03-19 17:36:24 -04:00
|
|
|
import distutils.version
|
2012-05-16 18:19:00 -04:00
|
|
|
import simplejson as json
|
|
|
|
import itertools
|
2012-01-22 19:40:48 -05:00
|
|
|
import os
|
2012-02-08 14:28:16 -05:00
|
|
|
import shutil
|
2012-01-22 19:40:48 -05:00
|
|
|
import sys
|
2012-01-25 19:13:44 -05:00
|
|
|
import cherrypy
|
|
|
|
import threading
|
|
|
|
import urllib2
|
2012-02-20 00:52:33 -05:00
|
|
|
from urllib2 import urlopen, HTTPError
|
2012-01-27 19:36:23 -05:00
|
|
|
import Queue
|
2012-03-29 17:43:05 -04:00
|
|
|
import cStringIO
|
2012-12-31 15:52:28 -05:00
|
|
|
import time
|
2012-01-22 19:40:48 -05:00
|
|
|
|
2013-02-23 14:26:18 -05:00
|
|
|
from nilmdb.utils import serializer_proxy
|
|
|
|
|
2012-02-08 14:28:16 -05:00
|
|
|
testdb = "tests/testdb"
|
2012-01-22 19:40:48 -05:00
|
|
|
|
|
|
|
#@atexit.register
|
|
|
|
#def cleanup():
|
|
|
|
# os.unlink(testdb)
|
|
|
|
|
2013-01-05 15:00:34 -05:00
|
|
|
from testutil.helpers import *
|
2012-03-29 17:43:05 -04:00
|
|
|
|
2012-01-27 19:36:23 -05:00
|
|
|
class Test00Nilmdb(object):  # named 00 so it runs first
    """Direct tests of the NilmDB database object (no HTTP server)."""

    def test_NilmDB(self):
        # Start from a clean slate; later tests reuse this database.
        recursive_unlink(testdb)

        # Opening a database under a nonexistent directory must fail.
        with assert_raises(IOError):
            nilmdb.NilmDB("/nonexistant-db/foo")

        # Open and close, both with and without synchronous writes.
        db = nilmdb.NilmDB(testdb)
        db.close()
        db = nilmdb.NilmDB(testdb, sync=False)
        db.close()

        # test timer, just to get coverage
        capture = cStringIO.StringIO()
        old = sys.stdout
        sys.stdout = capture
        try:
            with nilmdb.utils.Timer("test"):
                time.sleep(0.01)
        finally:
            # Restore stdout even if the timed block raises; otherwise
            # every subsequent test's output would go into `capture`.
            sys.stdout = old
        in_("test: ", capture.getvalue())

    def test_stream(self):
        db = nilmdb.NilmDB(testdb, sync=False)
        eq_(db.stream_list(), [])

        # Bad path (not absolute / too shallow)
        with assert_raises(ValueError):
            db.stream_create("foo/bar/baz", "PrepData")
        with assert_raises(ValueError):
            db.stream_create("/foo", "PrepData")
        # Bad layout type
        with assert_raises(ValueError):
            db.stream_create("/newton/prep", "NoSuchLayout")
        db.stream_create("/newton/prep", "PrepData")
        db.stream_create("/newton/raw", "RawData")
        db.stream_create("/newton/zzz/rawnotch", "RawNotchedData")

        # Verify we got 3 streams
        eq_(db.stream_list(), [ ["/newton/prep", "PrepData"],
                                ["/newton/raw", "RawData"],
                                ["/newton/zzz/rawnotch", "RawNotchedData"]
                                ])
        # Match just one type or one path
        eq_(db.stream_list(layout="RawData"), [ ["/newton/raw", "RawData"] ])
        eq_(db.stream_list(path="/newton/raw"), [ ["/newton/raw", "RawData"] ])

        # Verify that columns were made right (pytables specific)
        if "h5file" in db.data.__dict__:
            h5file = db.data.h5file
            eq_(len(h5file.getNode("/newton/prep").cols), 9)
            eq_(len(h5file.getNode("/newton/raw").cols), 7)
            eq_(len(h5file.getNode("/newton/zzz/rawnotch").cols), 10)
            assert(not h5file.getNode("/newton/prep").colindexed["timestamp"])
            assert(not h5file.getNode("/newton/prep").colindexed["c1"])

        # Set / get metadata
        eq_(db.stream_get_metadata("/newton/prep"), {})
        eq_(db.stream_get_metadata("/newton/raw"), {})
        meta1 = { "description": "The Data",
                  "v_scale": "1.234" }
        meta2 = { "description": "The Data" }
        meta3 = { "v_scale": "1.234" }
        db.stream_set_metadata("/newton/prep", meta1)
        db.stream_update_metadata("/newton/prep", {})
        db.stream_update_metadata("/newton/raw", meta2)
        db.stream_update_metadata("/newton/raw", meta3)
        eq_(db.stream_get_metadata("/newton/prep"), meta1)
        # meta2 then meta3 merged together equals meta1.
        eq_(db.stream_get_metadata("/newton/raw"), meta1)

        # fill in some test coverage for start >= end
        with assert_raises(nilmdb.server.NilmDBError):
            db.stream_remove("/newton/prep", 0, 0)
        with assert_raises(nilmdb.server.NilmDBError):
            db.stream_remove("/newton/prep", 1, 0)
        db.stream_remove("/newton/prep", 0, 1)

        db.close()
|
|
|
|
|
|
|
|
class TestBlockingServer(object):
    """Tests for running nilmdb.Server in blocking mode."""

    def setUp(self):
        # Wrap the database in a serializer proxy so multiple server
        # threads funnel through one worker.
        self.db = serializer_proxy(nilmdb.NilmDB)(testdb, sync=False)

    def tearDown(self):
        self.db.close()

    def test_blocking_server(self):
        # Server should fail if the database doesn't have a "_thread_safe"
        # property.
        with assert_raises(KeyError):
            nilmdb.Server(object())

        # Start web app on a custom port
        self.server = nilmdb.Server(self.db, host = "127.0.0.1",
                                    port = 12380, stoppable = True)

        # Run it in a background thread; `event` is set once the server
        # is actually listening.
        event = threading.Event()
        def run_server():
            self.server.start(blocking = True, event = event)
        thread = threading.Thread(target = run_server)
        thread.start()
        if not event.wait(timeout = 10):
            raise AssertionError("server didn't start in 10 seconds")

        # Send request to exit.  (Response body is irrelevant; the
        # request's side effect shuts the server down.)
        urlopen("http://127.0.0.1:12380/exit/", timeout = 1)

        # Wait for it
        thread.join()
|
2012-04-24 17:59:33 -04:00
|
|
|
|
2012-01-25 19:13:44 -05:00
|
|
|
def geturl(path):
    """Fetch the given path from the test server and return the body."""
    response = urlopen("http://127.0.0.1:12380" + path, timeout = 10)
    return response.read()
|
|
|
|
|
|
|
|
def getjson(path):
    """Fetch the given path from the test server, decoding it as JSON."""
    body = geturl(path)
    return json.loads(body)
|
|
|
|
|
|
|
|
class TestServer(object):
|
|
|
|
|
|
|
|
def setUp(self):
|
|
|
|
# Start web app on a custom port
|
2013-02-23 14:26:18 -05:00
|
|
|
self.db = serializer_proxy(nilmdb.NilmDB)(testdb, sync=False)
|
2012-01-27 19:36:23 -05:00
|
|
|
self.server = nilmdb.Server(self.db, host = "127.0.0.1",
|
2012-01-25 19:13:44 -05:00
|
|
|
port = 12380, stoppable = False)
|
|
|
|
self.server.start(blocking = False)
|
|
|
|
|
|
|
|
def tearDown(self):
|
|
|
|
# Close web app
|
|
|
|
self.server.stop()
|
2012-01-27 19:36:23 -05:00
|
|
|
self.db.close()
|
2012-01-25 19:13:44 -05:00
|
|
|
|
|
|
|
def test_server(self):
|
|
|
|
# Make sure we can't force an exit, and test other 404 errors
|
|
|
|
for url in [ "/exit", "/", "/favicon.ico" ]:
|
2012-02-20 00:52:33 -05:00
|
|
|
with assert_raises(HTTPError) as e:
|
2012-01-25 19:13:44 -05:00
|
|
|
geturl(url)
|
|
|
|
eq_(e.exception.code, 404)
|
|
|
|
|
|
|
|
# Check version
|
2013-02-05 19:07:38 -05:00
|
|
|
eq_(distutils.version.LooseVersion(getjson("/version")),
|
|
|
|
distutils.version.LooseVersion(nilmdb.__version__))
|
2012-01-25 19:13:44 -05:00
|
|
|
|
2012-01-27 19:36:23 -05:00
|
|
|
def test_stream_list(self):
|
|
|
|
# Known streams that got populated by an earlier test (test_nilmdb)
|
2012-01-25 19:13:44 -05:00
|
|
|
streams = getjson("/stream/list")
|
2012-01-27 19:36:23 -05:00
|
|
|
|
|
|
|
eq_(streams, [
|
2012-02-16 19:32:36 -05:00
|
|
|
['/newton/prep', 'PrepData'],
|
|
|
|
['/newton/raw', 'RawData'],
|
|
|
|
['/newton/zzz/rawnotch', 'RawNotchedData'],
|
2012-01-27 19:36:23 -05:00
|
|
|
])
|
|
|
|
|
|
|
|
streams = getjson("/stream/list?layout=RawData")
|
2012-02-16 19:32:36 -05:00
|
|
|
eq_(streams, [['/newton/raw', 'RawData']])
|
2012-01-27 19:36:23 -05:00
|
|
|
|
|
|
|
streams = getjson("/stream/list?layout=NoSuchLayout")
|
|
|
|
eq_(streams, [])
|
|
|
|
|
2012-02-20 00:52:33 -05:00
|
|
|
|
|
|
|
def test_stream_metadata(self):
|
|
|
|
with assert_raises(HTTPError) as e:
|
|
|
|
getjson("/stream/get_metadata?path=foo")
|
|
|
|
eq_(e.exception.code, 404)
|
|
|
|
|
|
|
|
data = getjson("/stream/get_metadata?path=/newton/prep")
|
|
|
|
eq_(data, {'description': 'The Data', 'v_scale': '1.234'})
|
|
|
|
|
|
|
|
data = getjson("/stream/get_metadata?path=/newton/prep"
|
|
|
|
"&key=v_scale")
|
|
|
|
eq_(data, {'v_scale': '1.234'})
|
|
|
|
|
|
|
|
data = getjson("/stream/get_metadata?path=/newton/prep"
|
|
|
|
"&key=v_scale&key=description")
|
|
|
|
eq_(data, {'description': 'The Data', 'v_scale': '1.234'})
|
|
|
|
|
|
|
|
data = getjson("/stream/get_metadata?path=/newton/prep"
|
|
|
|
"&key=v_scale&key=foo")
|
|
|
|
eq_(data, {'foo': None, 'v_scale': '1.234'})
|
|
|
|
|
|
|
|
data = getjson("/stream/get_metadata?path=/newton/prep"
|
|
|
|
"&key=foo")
|
|
|
|
eq_(data, {'foo': None})
|