# -*- coding: utf-8 -*-

import nilmdb
from nilmdb.utils.printf import *
from nilmdb.utils import timestamper
from nilmdb.client import ClientError, ServerError
from nilmdb.utils import datetime_tz

from nose.tools import *
from nose.tools import assert_raises
import itertools
import distutils.version
import os
import sys
import threading
import cStringIO
import simplejson as json
import unittest
import warnings
import resource

from testutil.helpers import *

testdb = "tests/client-testdb"

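# nose runs setup_module() once before the tests in this module and
# teardown_module() once after they have all finished.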
def setup_module():
    global test_server, test_db

    # Clear out DB
    recursive_unlink(testdb)

    # Start web app on a custom port
    test_db = nilmdb.NilmDB(testdb, sync = False)
    test_server = nilmdb.Server(test_db, host = "127.0.0.1",
                                port = 12380, stoppable = False,
                                fast_shutdown = True,
                                force_traceback = False)
    test_server.start(blocking = False)

def teardown_module():
    global test_server, test_db

    # Close web app
    test_server.stop()
    test_db.close()

class TestClient(object):

    def test_client_1_basic(self):
        # Test a fake host
        client = nilmdb.Client(url = "http://localhost:1/")
        with assert_raises(nilmdb.client.ServerError):
            client.version()

        # Trigger same error with a PUT request
        client = nilmdb.Client(url = "http://localhost:1/")
        with assert_raises(nilmdb.client.ServerError):
            client.version()

        # Then a fake URL on a real host
        client = nilmdb.Client(url = "http://localhost:12380/fake/")
        with assert_raises(nilmdb.client.ClientError):
            client.version()

        # Now a real URL with no http:// prefix
        client = nilmdb.Client(url = "localhost:12380")
        version = client.version()

        # Now use the real URL
        client = nilmdb.Client(url = "http://localhost:12380/")
        version = client.version()
        eq_(distutils.version.StrictVersion(version),
            distutils.version.StrictVersion(test_server.version))

        # Bad URLs should give 404, not 500
        with assert_raises(ClientError):
            client.http.get("/stream/create")

    def test_client_2_createlist(self):
        # Basic stream tests, like those in test_nilmdb:test_stream
        client = nilmdb.Client(url = "http://localhost:12380/")

        # Database starts empty
        eq_(client.stream_list(), [])

        # Bad path
        with assert_raises(ClientError):
            client.stream_create("foo/bar/baz", "PrepData")
        with assert_raises(ClientError):
            client.stream_create("/foo", "PrepData")
        # Bad layout type
        with assert_raises(ClientError):
            client.stream_create("/newton/prep", "NoSuchLayout")

        # Create three streams
        client.stream_create("/newton/prep", "PrepData")
        client.stream_create("/newton/raw", "RawData")
        client.stream_create("/newton/zzz/rawnotch", "RawNotchedData")

        # Verify we got 3 streams
        eq_(client.stream_list(), [ ["/newton/prep", "PrepData"],
                                    ["/newton/raw", "RawData"],
                                    ["/newton/zzz/rawnotch", "RawNotchedData"]
                                    ])

        # Match just one type or one path
        eq_(client.stream_list(layout="RawData"), [ ["/newton/raw", "RawData"] ])
        eq_(client.stream_list(path="/newton/raw"), [ ["/newton/raw", "RawData"] ])

        # Try messing with resource limits to trigger errors and get
        # more coverage.  Here, make it so we can only create files 1
        # byte in size, which will trigger an IOError in the server when
        # we create a table.
        limit = resource.getrlimit(resource.RLIMIT_FSIZE)
        resource.setrlimit(resource.RLIMIT_FSIZE, (1, limit[1]))
        with assert_raises(ServerError) as e:
            client.stream_create("/newton/hello", "RawData")
        resource.setrlimit(resource.RLIMIT_FSIZE, limit)

    def test_client_3_metadata(self):
        client = nilmdb.Client(url = "http://localhost:12380/")

        # Set / get metadata
        eq_(client.stream_get_metadata("/newton/prep"), {})
        eq_(client.stream_get_metadata("/newton/raw"), {})
        meta1 = { "description": "The Data",
                  "v_scale": "1.234" }
        meta2 = { "description": "The Data" }
        meta3 = { "v_scale": "1.234" }
        client.stream_set_metadata("/newton/prep", meta1)
        client.stream_update_metadata("/newton/prep", {})
        client.stream_update_metadata("/newton/raw", meta2)
        client.stream_update_metadata("/newton/raw", meta3)
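        # stream_update_metadata() merges new keys into the existing
        # metadata, so /newton/raw should now hold both keys (== meta1).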
        eq_(client.stream_get_metadata("/newton/prep"), meta1)
        eq_(client.stream_get_metadata("/newton/raw"), meta1)
        eq_(client.stream_get_metadata("/newton/raw", [ "description" ] ), meta2)
        eq_(client.stream_get_metadata("/newton/raw", [ "description",
                                                        "v_scale" ] ), meta1)

        # missing key
        eq_(client.stream_get_metadata("/newton/raw", "descr"),
            { "descr": None })
        eq_(client.stream_get_metadata("/newton/raw", [ "descr" ]),
            { "descr": None })

        # test wrong types (list instead of dict)
        with assert_raises(ClientError):
            client.stream_set_metadata("/newton/prep", [1,2,3])
        with assert_raises(ClientError):
            client.stream_update_metadata("/newton/prep", [1,2,3])

    def test_client_4_insert(self):
        client = nilmdb.Client(url = "http://localhost:12380/")

        datetime_tz.localtz_set("America/New_York")

        testfile = "tests/data/prep-20120323T1000"
        start = datetime_tz.datetime_tz.smartparse("20120323T1000")
        start = start.totimestamp()
        rate = 120
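        # The prep test file is timestamped at 120 Hz by TimestamperRate
        # below, so it covers roughly 120 seconds starting at 'start'.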

        # First try a nonexistent path
        data = timestamper.TimestamperRate(testfile, start, 120)
        with assert_raises(ClientError) as e:
            result = client.stream_insert("/newton/no-such-path", data)
        in_("404 Not Found", str(e.exception))

        # Now try reversed timestamps
        data = timestamper.TimestamperRate(testfile, start, 120)
        data = reversed(list(data))
        with assert_raises(ClientError) as e:
            result = client.stream_insert("/newton/prep", data)
        in_("400 Bad Request", str(e.exception))
        in_("timestamp is not monotonically increasing", str(e.exception))

        # Now try empty data (no server request made)
        empty = cStringIO.StringIO("")
        data = timestamper.TimestamperRate(empty, start, 120)
        result = client.stream_insert("/newton/prep", data)
        eq_(result, None)

        # Try forcing a server request with empty data
        with assert_raises(ClientError) as e:
            client.http.put("stream/insert", "", { "path": "/newton/prep",
                                                   "start": 0, "end": 0 })
        in_("400 Bad Request", str(e.exception))
        in_("no data provided", str(e.exception))

        # Specify start/end (starts too late)
        data = timestamper.TimestamperRate(testfile, start, 120)
        with assert_raises(ClientError) as e:
            result = client.stream_insert("/newton/prep", data,
                                          start + 5, start + 120)
        in_("400 Bad Request", str(e.exception))
        in_("Data timestamp 1332511200.0 < start time 1332511205.0",
            str(e.exception))

        # Specify start/end (ends too early)
        data = timestamper.TimestamperRate(testfile, start, 120)
        with assert_raises(ClientError) as e:
            result = client.stream_insert("/newton/prep", data,
                                          start, start + 1)
        in_("400 Bad Request", str(e.exception))
        # Client chunks the input, so the exact timestamp here might change
        # if the chunk positions change.
        in_("Data timestamp 1332511271.016667 >= end time 1332511201.0",
            str(e.exception))

        # Now do the real load
        data = timestamper.TimestamperRate(testfile, start, 120)
        result = client.stream_insert("/newton/prep", data,
                                      start, start + 119.999777)
        eq_(result, "ok")

        # Verify the intervals.  Should be just one, even if the data
        # was inserted in chunks, due to nilmdb interval concatenation.
        intervals = list(client.stream_intervals("/newton/prep"))
        eq_(intervals, [[start, start + 119.999777]])

        # Try some overlapping data -- just insert it again
        data = timestamper.TimestamperRate(testfile, start, 120)
        with assert_raises(ClientError) as e:
            result = client.stream_insert("/newton/prep", data)
        in_("400 Bad Request", str(e.exception))
        in_("verlap", str(e.exception))

    def test_client_5_extractremove(self):
        # Misc tests for extract and remove.  Most of them are in test_cmdline.
        client = nilmdb.Client(url = "http://localhost:12380/")

        for x in client.stream_extract("/newton/prep", 123, 123):
            raise AssertionError("shouldn't be any data for this request")
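
        # Removing with end < start is invalid and should be rejected
        # with a client error.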
        with assert_raises(ClientError) as e:
            client.stream_remove("/newton/prep", 123, 120)

    def test_client_6_generators(self):
        # A lot of the client functionality is already tested by test_cmdline,
        # but this gets a bit more coverage that cmdline misses.
        client = nilmdb.Client(url = "http://localhost:12380/")

        # Trigger a client error in generator
        start = datetime_tz.datetime_tz.smartparse("20120323T2000")
        end = datetime_tz.datetime_tz.smartparse("20120323T1000")
        for function in [ client.stream_intervals, client.stream_extract ]:
            with assert_raises(ClientError) as e:
                function("/newton/prep",
                         start.totimestamp(),
                         end.totimestamp()).next()
            in_("400 Bad Request", str(e.exception))
            in_("end before start", str(e.exception))

        # Trigger a curl error in generator
        with assert_raises(ServerError) as e:
            client.http.get_gen("http://nosuchurl/").next()

        # Trigger a curl error in generator
        with assert_raises(ServerError) as e:
            client.http.get_gen("http://nosuchurl/").next()

        # Check non-json version of string output
        eq_(json.loads(client.http.get("/stream/list", retjson=False)),
            client.http.get("/stream/list", retjson=True))

        # Check non-json version of generator output
        for (a, b) in itertools.izip(
                client.http.get_gen("/stream/list", retjson=False),
                client.http.get_gen("/stream/list", retjson=True)):
            eq_(json.loads(a), b)

        # Check PUT with generator out
        with assert_raises(ClientError) as e:
            client.http.put_gen("stream/insert", "",
                                { "path": "/newton/prep",
                                  "start": 0, "end": 0 }).next()
        in_("400 Bad Request", str(e.exception))
        in_("no data provided", str(e.exception))

        # Check 404 for missing streams
        for function in [ client.stream_intervals, client.stream_extract ]:
            with assert_raises(ClientError) as e:
                function("/no/such/stream").next()
            in_("404 Not Found", str(e.exception))
            in_("No such stream", str(e.exception))

    def test_client_7_headers(self):
        # Make sure that /stream/intervals and /stream/extract
        # properly return streaming, chunked, text/plain responses.
        # Pokes around in client.http internals a bit to look at the
        # response headers.

        client = nilmdb.Client(url = "http://localhost:12380/")
        http = client.http
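        # (http._headers is an internal attribute that holds the raw header
        # text of the most recent response; the checks below rely on it.)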

        # Use a warning rather than returning a test failure, so that we can
        # still disable chunked responses for debugging.

        # Intervals
        x = http.get("stream/intervals", { "path": "/newton/prep" },
                     retjson=False)
        lines_(x, 1)
        if "Transfer-Encoding: chunked" not in http._headers:
            warnings.warn("Non-chunked HTTP response for /stream/intervals")
        if "Content-Type: text/plain;charset=utf-8" not in http._headers:
            raise AssertionError("/stream/intervals is not text/plain:\n" +
                                 http._headers)

        # Extract
        x = http.get("stream/extract",
                     { "path": "/newton/prep",
                       "start": "123",
                       "end": "123" }, retjson=False)
        if "Transfer-Encoding: chunked" not in http._headers:
            warnings.warn("Non-chunked HTTP response for /stream/extract")
        if "Content-Type: text/plain;charset=utf-8" not in http._headers:
            raise AssertionError("/stream/extract is not text/plain:\n" +
                                 http._headers)

        # Make sure Access-Control-Allow-Origin gets set
        if "Access-Control-Allow-Origin: " not in http._headers:
            raise AssertionError("No Access-Control-Allow-Origin (CORS) "
                                 "header in /stream/extract response:\n" +
                                 http._headers)

    def test_client_8_unicode(self):
        # Basic Unicode tests
        client = nilmdb.Client(url = "http://localhost:12380/")

        # Delete streams that exist
        for stream in client.stream_list():
            client.stream_destroy(stream[0])

        # Database is empty
        eq_(client.stream_list(), [])

        # Create Unicode streams, match them
        raw = [ u"/düsseldorf/raw", u"uint16_6" ]
        prep = [ u"/düsseldorf/prep", u"uint16_6" ]
        client.stream_create(*raw)
        eq_(client.stream_list(), [raw])
        eq_(client.stream_list(layout=raw[1]), [raw])
        eq_(client.stream_list(path=raw[0]), [raw])
        client.stream_create(*prep)
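        # stream_list() returns streams sorted by path, so "prep" sorts
        # ahead of "raw".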
        eq_(client.stream_list(), [prep, raw])

        # Set / get metadata with Unicode keys and values
        eq_(client.stream_get_metadata(raw[0]), {})
        eq_(client.stream_get_metadata(prep[0]), {})
        meta1 = { u"alpha": u"α",
                  u"β": u"beta" }
        meta2 = { u"alpha": u"α" }
        meta3 = { u"β": u"beta" }
        client.stream_set_metadata(prep[0], meta1)
        client.stream_update_metadata(prep[0], {})
        client.stream_update_metadata(raw[0], meta2)
        client.stream_update_metadata(raw[0], meta3)
        eq_(client.stream_get_metadata(prep[0]), meta1)
        eq_(client.stream_get_metadata(raw[0]), meta1)
        eq_(client.stream_get_metadata(raw[0], [ "alpha" ]), meta2)
        eq_(client.stream_get_metadata(raw[0], [ "alpha", "β" ]), meta1)
|