Compare commits: nilmdb-1.8 ... nilmdb-1.9 (18 commits)

Commit SHA1s:
71cd7ed9b7, a79d6104d5, 8e8ec59e30, b89b945a0f, bd7bdb2eb8, 840cd2fd13,
bbd59c8b50, 405c110fd7, 274adcd856, a1850c9c2c, 6cd28b67b1, d6d215d53d,
e02143ddb2, e275384d03, a6a67ec15c, fc43107307, 90633413bb, c7c3aff0fb
@@ -7,4 +7,4 @@
 exclude_lines =
     pragma: no cover
     if 0:
-omit = nilmdb/utils/datetime_tz*,nilmdb/scripts,nilmdb/_version.py
+omit = nilmdb/utils/datetime_tz*,nilmdb/scripts,nilmdb/_version.py,nilmdb/fsck
Makefile (4 lines changed)
@@ -23,6 +23,10 @@ docs:
 lint:
 	pylint --rcfile=.pylintrc nilmdb

+fscktest:
+#	python -c "import nilmdb.fsck; nilmdb.fsck.Fsck('/home/jim/wsgi/db').check()"
+	python -c "import nilmdb.fsck; nilmdb.fsck.Fsck('/home/jim/mnt/bucket/mnt/sharon/data/db').check()"
+
 test:
 ifeq ($(INSIDE_EMACS), t)
 	# Use the slightly more flexible script
@@ -19,12 +19,12 @@ Then, set up Apache with a configuration like:
 
     <VirtualHost>
         WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
-        WSGIApplicationGroup nilmdb-appgroup
-        WSGIProcessGroup nilmdb-procgroup
        WSGIDaemonProcess nilmdb-procgroup threads=32 user=nilm group=nilm
-
-        # Access control example:
         <Location /nilmdb>
+            WSGIProcessGroup nilmdb-procgroup
+            WSGIApplicationGroup nilmdb-appgroup
+
+            # Access control example:
             Order deny,allow
             Deny from all
             Allow from 1.2.3.4
@@ -58,6 +58,11 @@ class Client(object):
         return self.http.get("dbinfo")
 
     def stream_list(self, path = None, layout = None, extended = False):
+        """Return a sorted list of [path, layout] lists.  If 'path' or
+        'layout' are specified, only return streams that match those
+        exact values.  If 'extended' is True, the returned lists have
+        extended info, e.g.: [path, layout, extent_min, extent_max,
+        total_rows, total_seconds."""
         params = {}
         if path is not None:
             params["path"] = path
@@ -69,6 +74,7 @@ class Client(object):
         return nilmdb.utils.sort.sort_human(streams, key = lambda s: s[0])
 
     def stream_get_metadata(self, path, keys = None):
+        """Get stream metadata"""
         params = { "path": path }
         if keys is not None:
             params["key"] = keys
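For illustration, a short sketch of the client calls documented above. It assumes a nilmdb server is reachable at the placeholder URL, that the Client constructor simply takes that URL, and uses "/newton/prep" only as an example stream path; none of those details come from this diff.

    import nilmdb.client
    from nilmdb.utils.printf import printf

    # Placeholder URL; point this at a real nilmdb server.
    client = nilmdb.client.Client("http://localhost/nilmdb/")

    # Plain listing: sorted [path, layout] pairs.
    for (path, layout) in client.stream_list():
        printf("%s %s\n", path, layout)

    # Extended listing adds extent and size info per stream:
    # [path, layout, extent_min, extent_max, total_rows, total_seconds]
    for info in client.stream_list(extended = True):
        printf("%s: %d rows\n", info[0], info[4])

    # Metadata for one stream, optionally limited to specific keys.
    printf("%s\n", client.stream_get_metadata("/newton/prep"))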
@@ -29,6 +29,14 @@ for cmd in subcommands:
     subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist = [ cmd ])
 
 class JimArgumentParser(argparse.ArgumentParser):
+    def parse_args(self, args=None, namespace=None):
+        # Look for --version anywhere and change it to just "nilmtool
+        # --version".  This makes "nilmtool cmd --version" work, which
+        # is needed by help2man.
+        if "--version" in (args or sys.argv[1:]):
+            args = [ "--version" ]
+        return argparse.ArgumentParser.parse_args(self, args, namespace)
+
     def error(self, message):
         self.print_usage(sys.stderr)
         self.exit(2, sprintf("error: %s\n", message))
@@ -1,5 +1,6 @@
 from nilmdb.utils.printf import *
 import nilmdb.utils.time
+from nilmdb.utils.interval import Interval
 
 import fnmatch
 import argparse
@@ -42,6 +43,8 @@ def setup(self, sub):
     group = cmd.add_argument_group("Misc options")
     group.add_argument("-T", "--timestamp-raw", action="store_true",
                        help="Show raw timestamps when printing times")
+    group.add_argument("-o", "--optimize", action="store_true",
+                       help="Optimize (merge adjacent) intervals")
 
     return cmd
 
@@ -58,9 +61,16 @@ def cmd_intervals(self):
         time_string = nilmdb.utils.time.timestamp_to_human
 
     try:
-        for (start, end) in self.client.stream_intervals(
-                self.args.path, self.args.start, self.args.end, self.args.diff):
-            printf("[ %s -> %s ]\n", time_string(start), time_string(end))
+        intervals = ( Interval(start, end) for (start, end) in
+                      self.client.stream_intervals(self.args.path,
+                                                   self.args.start,
+                                                   self.args.end,
+                                                   self.args.diff) )
+        if self.args.optimize:
+            intervals = nilmdb.utils.interval.optimize(intervals)
+        for i in intervals:
+            printf("[ %s -> %s ]\n", time_string(i.start), time_string(i.end))
+
     except nilmdb.client.ClientError as e:
         self.die("error listing intervals: %s", str(e))
 
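The same merging added behind the new --optimize flag is available outside the command line. A hedged sketch mirroring cmd_intervals() above; the server URL and "/newton/prep" path are placeholders, and the three explicit None arguments are assumed to mean "no start/end restriction, no diff stream", matching the positional call shown in the old code.

    import nilmdb.client
    import nilmdb.utils.interval
    from nilmdb.utils.interval import Interval
    from nilmdb.utils.printf import printf

    client = nilmdb.client.Client("http://localhost/nilmdb/")  # placeholder

    # Wrap the (start, end) pairs from the server in Interval objects,
    # just as the new cmd_intervals() does.
    intervals = ( Interval(start, end) for (start, end) in
                  client.stream_intervals("/newton/prep", None, None, None) )

    # optimize() merges intervals that are adjacent in time.
    for i in nilmdb.utils.interval.optimize(intervals):
        printf("[ %s -> %s ]\n", i.start, i.end)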
nilmdb/fsck/.#fsck.py (new symbolic link, 1 line)
@@ -0,0 +1 @@
jim@pilot.lees.18066:1373305995
nilmdb/fsck/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
"""nilmdb.fsck"""

from __future__ import absolute_import

from nilmdb.fsck.fsck import Fsck
nilmdb/fsck/fsck.py (new file, 194 lines)
@@ -0,0 +1,194 @@
# -*- coding: utf-8 -*-

"""Check database consistency"""

import nilmdb.utils
import nilmdb.server
from nilmdb.utils.interval import IntervalError
from nilmdb.server.interval import Interval, IntervalSet
from nilmdb.utils.printf import *
from nilmdb.utils.time import timestamp_to_string

from collections import defaultdict
import sqlite3
import os
import progressbar
import time
import cPickle as pickle

class FsckError(Exception):
    def __init__(self, format, *args):
        Exception.__init__(self, sprintf(format, *args))

def log(format, *args):
    printf(format, *args)

def err(format, *args):
    fprintf(sys.stderr, format, *args)

class Progress(object):
    def __init__(self, maxval):
        self.bar = progressbar.ProgressBar(maxval = maxval)
        if self.bar.term_width == 0:
            self.bar.term_width = 75
    def __enter__(self):
        self.bar.start()
        self.last_update = 0
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            self.bar.finish()
        else:
            printf("\n")
    def update(self, val):
        self.bar.update(val)
        #now = time.time()
        #if now - self.last_update < 0.005:
        #    time.sleep(0.005)
        #self.last_update = now

class Fsck(object):

    def __init__(self, path):
        self.basepath = path
        self.sqlpath = os.path.join(path, "data.sql")
        self.bulkpath = os.path.join(path, "data")
        self.bulklock = os.path.join(path, "data.lock")

    def check(self):
        self.check_paths()
        self.check_sql()
        self.check_streams()
        log("ok\n")

    def check_paths(self):
        log("checking paths\n")
        if not os.path.isfile(self.sqlpath):
            raise FsckError("SQL database missing")
        if not os.path.isdir(self.bulkpath):
            raise FsckError("Bulk data directory missing")
        with open(self.bulklock, "w") as lockfile:
            if not nilmdb.utils.lock.exclusive_lock(lockfile):
                raise FsckError('database already locked by another process')
        self.bulk = nilmdb.server.bulkdata.BulkData(self.basepath)
        # override must_close warning
        if "_must_close" in dir(self.bulk):
            del self.bulk._must_close

    def check_sql(self):
        log("checking sqlite database\n")

        self.sql = sqlite3.connect(self.sqlpath)
        with self.sql as con:
            ver = con.execute("PRAGMA user_version").fetchone()[0]
            good = max(nilmdb.server.nilmdb._sql_schema_updates.keys())
            if ver != good:
                raise FsckError("database version %d too old, should be %d",
                                ver, good)
            self.stream_path = {}
            self.stream_layout = {}
            log("  loading paths\n")
            result = con.execute("SELECT id, path, layout FROM streams")
            for r in result:
                if r[0] in self.stream_path:
                    raise FsckError("duplicated ID %d in stream IDs", r[0])
                self.stream_path[r[0]] = r[1]
                self.stream_layout[r[0]] = r[2]

            log("  loading intervals\n")
            self.stream_interval = defaultdict(list)
            result = con.execute("SELECT stream_id, start_time, end_time, "
                                 "start_pos, end_pos FROM ranges")
            for r in result:
                if r[0] not in self.stream_path:
                    raise FsckError("interval ID %d not in streams", k)
                self.stream_interval[r[0]].append((r[1], r[2], r[3], r[4]))

            log("  loading metadata\n")
            self.stream_meta = defaultdict(dict)
            result = con.execute("SELECT stream_id, key, value FROM metadata")
            for r in result:
                if r[0] not in self.stream_path:
                    raise FsckError("metadata ID %d not in streams", k)
                if r[1] in self.stream_meta[r[0]]:
                    raise FsckError("duplicate metadata key '%s' for stream %d",
                                    r[1], r[0])
                self.stream_meta[r[0]][r[1]] = r[2]

    def check_streams(self):
        log("checking streams\n")
        ids = self.stream_path.keys()
        with Progress(len(ids)) as pbar:
            for i, sid in enumerate(ids):
                pbar.update(i)
                path = self.stream_path[sid]

                # unique path, valid layout
                if self.stream_path.values().count(path) != 1:
                    raise FsckError("duplicated path %s", path)
                layout = self.stream_layout[sid].split('_')[0]
                if layout not in ('int8', 'int16', 'int32', 'int64',
                                  'uint8', 'uint16', 'uint32', 'uint64',
                                  'float32', 'float64'):
                    raise FsckError("bad layout %s for %s", layout, path)
                count = int(self.stream_layout[sid].split('_')[1])
                if count < 1 or count > 1024:
                    raise FsckError("bad count %d for %s", count, path)

                # must exist in bulkdata
                bulk = self.bulkpath + path
                if not os.path.isdir(bulk):
                    raise FsckError("%s: missing bulkdata dir", path)
                if not nilmdb.server.bulkdata.Table.exists(bulk):
                    raise FsckError("%s: bad bulkdata table", path)

                # intervals don't overlap.  Abuse IntervalSet to check
                # for intervals in file positions, too.
                timeiset = IntervalSet()
                posiset = IntervalSet()
                for (stime, etime, spos, epos) in self.stream_interval[sid]:
                    new = Interval(stime, etime)
                    try:
                        timeiset += new
                    except IntervalError:
                        raise FsckError("%s: overlap in intervals:\n"
                                        "set: %s\nnew: %s\n",
                                        path, str(timeiset), str(new))
                    if spos != epos:
                        new = Interval(spos, epos)
                        try:
                            posiset += new
                        except IntervalError:
                            raise FsckError("%s: overlap in file offsets:\n"
                                            "set: %s\nnew: %s\n",
                                            path, str(posiset), str(new))

                # check bulkdata
                self.check_bulkdata(sid, path, bulk)

                continue
                # verify we can can open it with bulkdata
                try:
                    tab = None
                    try:
                        tab = nilmdb.server.bulkdata.Table(bulk)
                    except Exception as e:
                        raise FsckError("%s: can't open bulkdata: %s",
                                        path, str(e))
                    self.check_bulkdata(path, tab)
                finally:
                    if tab:
                        tab.close()

    def check_bulkdata(self, sid, path, bulk):
        with open(os.path.join(bulk, "_format"), "rb") as f:
            fmt = pickle.load(f)
        if fmt["version"] != 3:
            raise FsckError("%s: bad or unsupported bulkdata version %d",
                            path, fmt["version"])
        row_per_file = int(fmt["rows_per_file"])
        files_per_dir = int(fmt["files_per_dir"])
        layout = fmt["layout"]
        if layout != self.stream_layout[sid]:
            raise FsckError("%s: layout mismatch %s != %s", path,
                            layout, self.stream_layout[sid])
nilmdb/scripts/nilmdb_fsck.py (new executable file, 23 lines)
@@ -0,0 +1,23 @@
#!/usr/bin/python

import nilmdb.fsck
import argparse
import os
import sys

def main():
    """Main entry point for the 'nilmdb-fsck' command line script"""

    parser = argparse.ArgumentParser(
        description = 'Check database consistency',
        formatter_class = argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-V", "--version", action="version",
                        version = nilmdb.__version__)
    parser.add_argument('-d', '--database', help = 'Database directory',
                        default = "./db")
    args = parser.parse_args()

    nilmdb.fsck.Fsck(args.database).check()

if __name__ == "__main__":
    main()
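Once the package is installed, the setup.py change further below exposes this module as the nilmdb-fsck console script. The same entry point can also be exercised in-process by patching sys.argv; a sketch with a placeholder path:

    import sys
    import nilmdb.scripts.nilmdb_fsck

    # Equivalent to running "nilmdb-fsck -d /path/to/db" from a shell;
    # main() parses sys.argv via argparse.
    sys.argv = ["nilmdb-fsck", "-d", "/path/to/db"]
    nilmdb.scripts.nilmdb_fsck.main()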
@@ -330,7 +330,8 @@ class Table(object):
 
         # Find the last directory.  We sort and loop through all of them,
         # starting with the numerically greatest, because the dirs could be
-        # empty if something was deleted.
+        # empty if something was deleted but the directory was unexpectedly
+        # not deleted.
         subdirs = sorted(filter(regex.search, os.listdir(self.root)),
                          key = lambda x: int(x, 16), reverse = True)
 
@@ -27,6 +27,7 @@ from nilmdb.server.serverutil import (
     json_error_page,
     cherrypy_start,
     cherrypy_stop,
+    bool_param,
     )
 
 # Add CORS_allow tool
@@ -83,10 +84,18 @@ class Stream(NilmApp):
     # Helpers
     def _get_times(self, start_param, end_param):
         (start, end) = (None, None)
-        if start_param is not None:
-            start = string_to_timestamp(start_param)
-        if end_param is not None:
-            end = string_to_timestamp(end_param)
+        try:
+            if start_param is not None:
+                start = string_to_timestamp(start_param)
+        except Exception:
+            raise cherrypy.HTTPError("400 Bad Request", sprintf(
+                "invalid start (%s): must be a numeric timestamp", start_param))
+        try:
+            if end_param is not None:
+                end = string_to_timestamp(end_param)
+        except Exception:
+            raise cherrypy.HTTPError("400 Bad Request", sprintf(
+                "invalid end (%s): must be a numeric timestamp", end_param))
         if start is not None and end is not None:
             if start >= end:
                 raise cherrypy.HTTPError(
@@ -221,6 +230,8 @@ class Stream(NilmApp):
         little-endian and matches the database types (including an
         int64 timestamp).
         """
+        binary = bool_param(binary)
+
         # Important that we always read the input before throwing any
         # errors, to keep lengths happy for persistent connections.
         # Note that CherryPy 3.2.2 has a bug where this fails for GET
@@ -345,6 +356,10 @@ class Stream(NilmApp):
         little-endian and matches the database types (including an
         int64 timestamp).
         """
+        binary = bool_param(binary)
+        markup = bool_param(markup)
+        count = bool_param(count)
+
         (start, end) = self._get_times(start, end)
 
         # Check path and get layout
@@ -7,6 +7,21 @@ import os
 import decorator
 import simplejson as json
 
+# Helper to parse parameters into booleans
+def bool_param(s):
+    """Return a bool indicating whether parameter 's' was True or False,
+    supporting a few different types for 's'."""
+    try:
+        ss = s.lower()
+        if ss in [ "0", "false", "f", "no", "n" ]:
+            return False
+        if ss in [ "1", "true", "t", "yes", "y" ]:
+            return True
+    except Exception:
+        return bool(s)
+    raise cherrypy.HTTPError("400 Bad Request",
+                             "can't parse parameter: " + ss)
+
 # Decorators
 def chunked_response(func):
     """Decorator to enable chunked responses."""
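A quick sketch of how the new helper interprets its argument (CherryPy must be importable, since unparseable strings become a 400 error):

    from nilmdb.server.serverutil import bool_param

    # Known string spellings are matched case-insensitively.
    assert bool_param("0") is False
    assert bool_param("no") is False
    assert bool_param("1") is True
    assert bool_param("Yes") is True

    # Non-strings have no .lower(), so the except clause falls back
    # to bool(s).
    assert bool_param(0) is False
    assert bool_param(5) is True

    # Anything else, e.g. bool_param("maybe"), raises
    # cherrypy.HTTPError("400 Bad Request", ...), which is what the
    # updated /stream/extract handler relies on.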
@@ -1,5 +1,6 @@
 """Interval.  Like nilmdb.server.interval, but re-implemented here
-in plain Python so clients have easier access to it.
+in plain Python so clients have easier access to it, and with a few
+helper functions.
 
 Intervals are half-open, ie. they include data points with timestamps
 [start, end)
@@ -34,6 +35,10 @@ class Interval:
         return ("[" + nilmdb.utils.time.timestamp_to_string(self.start) +
                 " -> " + nilmdb.utils.time.timestamp_to_string(self.end) + ")")
 
+    def human_string(self):
+        return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
+                " -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")
+
     def __cmp__(self, other):
         """Compare two intervals. If non-equal, order by start then end"""
         return cmp(self.start, other.start) or cmp(self.end, other.end)
@@ -53,18 +58,11 @@ class Interval:
             raise IntervalError("not a subset")
         return Interval(start, end)
 
-def set_difference(a, b):
-    """
-    Compute the difference (a \\ b) between the intervals in 'a' and
-    the intervals in 'b'; i.e., the ranges that are present in 'self'
-    but not 'other'.
-
-    'a' and 'b' must both be iterables.
-
-    Returns a generator that yields each interval in turn.
-    Output intervals are built as subsets of the intervals in the
-    first argument (a).
-    """
+def _interval_math_helper(a, b, op, subset = True):
+    """Helper for set_difference, intersection functions,
+    to compute interval subsets based on a math operator on ranges
+    present in A and B.  Subsets are computed from A, or new intervals
+    are generated if subset = False."""
     # Iterate through all starts and ends in sorted order.  Add a
     # tag to the iterator so that we can figure out which one they
     # were, after sorting.
@@ -79,28 +77,71 @@ def set_difference(a, b):
     # At each point, evaluate which type of end it is, to determine
     # how to build up the output intervals.
     a_interval = None
-    b_interval = None
+    in_a = False
+    in_b = False
     out_start = None
     for (ts, k, i) in nilmdb.utils.iterator.imerge(a_iter, b_iter):
         if k == 0:
-            # start a interval
             a_interval = i
-            if b_interval is None:
-                out_start = ts
+            in_a = True
         elif k == 1:
-            # start b interval
-            b_interval = i
-            if out_start is not None and out_start != ts:
-                yield a_interval.subset(out_start, ts)
-                out_start = None
+            in_b = True
         elif k == 2:
-            # end a interval
-            if out_start is not None and out_start != ts:
-                yield a_interval.subset(out_start, ts)
-                out_start = None
-            a_interval = None
+            in_a = False
         elif k == 3:
-            # end b interval
-            b_interval = None
-            if a_interval:
-                out_start = ts
+            in_b = False
+        include = op(in_a, in_b)
+        if include and out_start is None:
+            out_start = ts
+        elif not include:
+            if out_start is not None and out_start != ts:
+                if subset:
+                    yield a_interval.subset(out_start, ts)
+                else:
+                    yield Interval(out_start, ts)
+            out_start = None
+
+def set_difference(a, b):
+    """
+    Compute the difference (a \\ b) between the intervals in 'a' and
+    the intervals in 'b'; i.e., the ranges that are present in 'self'
+    but not 'other'.
+
+    'a' and 'b' must both be iterables.
+
+    Returns a generator that yields each interval in turn.
+    Output intervals are built as subsets of the intervals in the
+    first argument (a).
+    """
+    return _interval_math_helper(a, b, (lambda a, b: a and not b))
+
+def intersection(a, b):
+    """
+    Compute the intersection between the intervals in 'a' and the
+    intervals in 'b'; i.e., the ranges that are present in both 'a'
+    and 'b'.
+
+    'a' and 'b' must both be iterables.
+
+    Returns a generator that yields each interval in turn.
+    Output intervals are built as subsets of the intervals in the
+    first argument (a).
+    """
+    return _interval_math_helper(a, b, (lambda a, b: a and b))
+
+def optimize(it):
+    """
+    Given an iterable 'it' with intervals, optimize them by joining
+    together intervals that are adjacent in time, and return a generator
+    that yields the new intervals.
+    """
+    saved_int = None
+    for interval in it:
+        if saved_int is not None:
+            if saved_int.end == interval.start:
+                interval.start = saved_int.start
+            else:
+                yield saved_int
+        saved_int = interval
+    if saved_int is not None:
+        yield saved_int
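The new module-level helpers can be exercised without a server; this mirrors what the updated unit tests further below do. Timestamps are plain integers, so the small values here are only for illustration.

    from nilmdb.utils.printf import printf
    from nilmdb.utils.interval import (Interval, optimize,
                                       intersection, set_difference)

    a = [ Interval(1, 3), Interval(4, 6) ]
    b = [ Interval(2, 5) ]

    # intersection() keeps ranges present in both inputs;
    # expect [2 -> 3) and [4 -> 5).
    for i in intersection(a, b):
        printf("[ %d -> %d )\n", i.start, i.end)

    # set_difference() keeps ranges present only in 'a';
    # expect [1 -> 2) and [5 -> 6).
    for i in set_difference(a, b):
        printf("[ %d -> %d )\n", i.start, i.end)

    # optimize() merges intervals that touch end-to-start;
    # expect [1 -> 3) and [4 -> 5).
    for i in optimize([ Interval(1, 2), Interval(2, 3), Interval(4, 5) ]):
        printf("[ %d -> %d )\n", i.start, i.end)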
@@ -91,6 +91,20 @@ def serializer_proxy(obj_or_type):
             r = SerializerCallProxy(self.__call_queue, attr, self)
             return r
 
+        # For an interable object, on __iter__(), save the object's
+        # iterator and return this proxy.  On next(), call the object's
+        # iterator through this proxy.
+        def __iter__(self):
+            attr = getattr(self.__object, "__iter__")
+            self.__iter = SerializerCallProxy(self.__call_queue, attr, self)()
+            return self
+        def next(self):
+            return SerializerCallProxy(self.__call_queue,
+                                       self.__iter.next, self)()
+
+        def __getitem__(self, key):
+            return self.__getattr__("__getitem__")(key)
+
         def __call__(self, *args, **kwargs):
             """Call this to instantiate the type, if a type was passed
             to serializer_proxy.  Otherwise, pass the call through."""
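With __iter__, next, and __getitem__ now forwarded, a proxied object can be iterated and indexed while the underlying calls still run on the serializing thread. A sketch with a throwaway Python 2 style iterable, mirroring the ListLike test class added further below; the Counter class itself is just an illustration:

    import nilmdb.utils
    from nilmdb.utils.printf import printf

    class Counter(object):
        """Toy iterable: yields 1 through 5 and echoes indexing requests."""
        def __init__(self):
            self.foo = 0
        def __iter__(self):
            self.foo = 0
            return self
        def next(self):              # Python 2 iterator protocol
            if self.foo < 5:
                self.foo += 1
                return self.foo
            raise StopIteration
        def __getitem__(self, key):
            return key

    proxy = nilmdb.utils.serializer_proxy(Counter)()
    printf("%s\n", list(proxy))      # [1, 2, 3, 4, 5]; each next() is serialized
    printf("%s\n", proxy[3])         # 3; __getitem__ is forwarded too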
@@ -60,7 +60,7 @@ def rate_to_period(hz, cycles = 1):
 def parse_time(toparse):
     """
     Parse a free-form time string and return a nilmdb timestamp
-    (integer seconds since epoch).  If the string doesn't contain a
+    (integer microseconds since epoch).  If the string doesn't contain a
     timestamp, the current local timezone is assumed (e.g. from the TZ
     env var).
     """
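nilmdb timestamps are integer microseconds throughout, which is what the corrected docstring now says. A small round-trip sketch; the exact input formats accepted by parse_time() depend on its free-form parser, so the string below is only assumed to be one accepted form:

    import nilmdb.utils.time
    from nilmdb.utils.printf import printf

    # Free-form string in, integer microseconds since the epoch out.
    ts = nilmdb.utils.time.parse_time("Fri, 13 Feb 2009 23:31:30 UTC")

    # One second is 1000000 timestamp units.
    later = ts + 1000000

    # Render it back as a human-readable string.
    printf("%s\n", nilmdb.utils.time.timestamp_to_human(later))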
@@ -1,7 +1,14 @@
+import sys
+
+if sys.version_info[0] >= 3: # pragma: no cover (future Python3 compat)
+    text_type = str
+else:
+    text_type = unicode
+
 def encode(u):
     """Try to encode something from Unicode to a string using the
     default encoding.  If it fails, try encoding as UTF-8."""
-    if not isinstance(u, unicode):
+    if not isinstance(u, text_type):
         return u
     try:
         return u.encode()
@@ -11,7 +18,7 @@ def encode(u):
 def decode(s):
     """Try to decode someting from string to Unicode using the
     default encoding.  If it fails, try decoding as UTF-8."""
-    if isinstance(s, unicode):
+    if isinstance(s, text_type):
         return s
     try:
         return s.decode()
setup.py (2 lines changed)
@@ -126,11 +126,13 @@ setup(name='nilmdb',
           'nilmdb.client',
           'nilmdb.cmdline',
           'nilmdb.scripts',
+          'nilmdb.fsck',
           ],
       entry_points = {
           'console_scripts': [
              'nilmtool = nilmdb.scripts.nilmtool:main',
              'nilmdb-server = nilmdb.scripts.nilmdb_server:main',
+             'nilmdb-fsck = nilmdb.scripts.nilmdb_fsck:main',
              ],
          },
      ext_modules = ext_modules,
@@ -242,6 +242,19 @@ class TestClient(object):
         in_("400 Bad Request", str(e.exception))
         in_("start must precede end", str(e.exception))
 
+        # Invalid times in HTTP request
+        with assert_raises(ClientError) as e:
+            client.http.put("stream/insert", "", { "path": "/newton/prep",
+                                                   "start": "asdf", "end": 0 })
+        in_("400 Bad Request", str(e.exception))
+        in_("invalid start", str(e.exception))
+
+        with assert_raises(ClientError) as e:
+            client.http.put("stream/insert", "", { "path": "/newton/prep",
+                                                   "start": 0, "end": "asdf" })
+        in_("400 Bad Request", str(e.exception))
+        in_("invalid end", str(e.exception))
+
         # Good content type
         with assert_raises(ClientError) as e:
             client.http.put("stream/insert", "",
@@ -354,10 +367,6 @@ class TestClient(object):
         with assert_raises(ServerError) as e:
             client.http.get_gen("http://nosuchurl.example.com./").next()
 
-        # Trigger a curl error in generator
-        with assert_raises(ServerError) as e:
-            client.http.get_gen("http://nosuchurl.example.com./").next()
-
         # Check 404 for missing streams
         for function in [ client.stream_intervals, client.stream_extract ]:
             with assert_raises(ClientError) as e:
@@ -396,27 +405,38 @@ class TestClient(object):
                                  headers())
 
         # Extract
-        x = http.get("stream/extract",
-                     { "path": "/newton/prep",
-                       "start": "123",
-                       "end": "124" })
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124" })
         if "transfer-encoding: chunked" not in headers():
             warnings.warn("Non-chunked HTTP response for /stream/extract")
         if "content-type: text/plain;charset=utf-8" not in headers():
             raise AssertionError("/stream/extract is not text/plain:\n" +
                                  headers())
 
-        x = http.get("stream/extract",
-                     { "path": "/newton/prep",
-                       "start": "123",
-                       "end": "124",
-                       "binary": "1" })
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124",
+                                         "binary": "1" })
         if "transfer-encoding: chunked" not in headers():
             warnings.warn("Non-chunked HTTP response for /stream/extract")
         if "content-type: application/octet-stream" not in headers():
             raise AssertionError("/stream/extract is not binary:\n" +
                                  headers())
 
+        # Make sure a binary of "0" is really off
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124",
+                                         "binary": "0" })
+        if "content-type: application/octet-stream" in headers():
+            raise AssertionError("/stream/extract is not text:\n" +
+                                 headers())
+
+        # Invalid parameters
+        with assert_raises(ClientError) as e:
+            x = http.get("stream/extract", { "path": "/newton/prep",
+                                             "start": "123", "end": "124",
+                                             "binary": "asdfasfd" })
+        in_("can't parse parameter", str(e.exception))
+
         client.close()
 
     def test_client_08_unicode(self):
@@ -59,8 +59,7 @@ class TestCmdline(object):
 
     def run(self, arg_string, infile=None, outfile=None):
         """Run a cmdline client with the specified argument string,
-        passing the given input.  Returns a tuple with the output and
-        exit code"""
+        passing the given input.  Save the output and exit code."""
         # printf("TZ=UTC ./nilmtool.py %s\n", arg_string)
         os.environ['NILMDB_URL'] = "http://localhost:32180/"
         class stdio_wrapper:
@@ -160,6 +159,12 @@ class TestCmdline(object):
         self.ok("--help")
         self.contain("usage:")
 
+        # help
+        self.ok("--version")
+        ver = self.captured
+        self.ok("list --version")
+        eq_(self.captured, ver)
+
         # fail for no args
         self.fail("")
 
@@ -1011,6 +1016,18 @@ class TestCmdline(object):
         self.match("[ Thu, 01 Jan 2004 00:00:00.000000 +0000 -"
                    "> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")
 
+        # optimize
+        self.ok("insert -s 01-01-2002 -e 01-01-2004 /diff/1 /dev/null")
+        self.ok("intervals /diff/1")
+        self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
+                   "> Thu, 01 Jan 2004 00:00:00.000000 +0000 ]\n"
+                   "[ Thu, 01 Jan 2004 00:00:00.000000 +0000 -"
+                   "> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")
+        self.ok("intervals /diff/1 --optimize")
+        self.ok("intervals /diff/1 -o")
+        self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
+                   "> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")
+
         self.ok("destroy -R /diff/1")
         self.ok("destroy -R /diff/2")
 
@@ -59,6 +59,14 @@ class TestInterval:
         self.test_interval_intersect()
         Interval = NilmdbInterval
 
+        # Other helpers in nilmdb.utils.interval
+        i = [ UtilsInterval(1,2), UtilsInterval(2,3), UtilsInterval(4,5) ]
+        eq_(list(nilmdb.utils.interval.optimize(i)),
+            [ UtilsInterval(1,3), UtilsInterval(4,5) ])
+        eq_(UtilsInterval(1234567890123456, 1234567890654321).human_string(),
+            "[ Fri, 13 Feb 2009 18:31:30.123456 -0500 -> " +
+            "Fri, 13 Feb 2009 18:31:30.654321 -0500 ]")
+
     def test_interval(self):
         # Test Interval class
         os.environ['TZ'] = "America/New_York"
@@ -226,13 +234,16 @@ class TestInterval:
         x = makeset("[--)") & 1234
 
         def do_test(a, b, c, d):
-            # a & b == c
+            # a & b == c (using nilmdb.server.interval)
             ab = IntervalSet()
             for x in b:
                 for i in (a & x):
                     ab += i
             eq_(ab,c)
 
+            # a & b == c (using nilmdb.utils.interval)
+            eq_(IntervalSet(nilmdb.utils.interval.intersection(a,b)), c)
+
             # a \ b == d
             eq_(IntervalSet(nilmdb.utils.interval.set_difference(a,b)), d)
 
@@ -302,6 +313,17 @@ class TestInterval:
         eq_(nilmdb.utils.interval.set_difference(
             a.intersection(list(c)[0]), b.intersection(list(c)[0])), d)
 
+        # Fill out test coverage for non-subsets
+        def diff2(a,b, subset):
+            return nilmdb.utils.interval._interval_math_helper(
+                a, b, (lambda a, b: b and not a), subset=subset)
+        with assert_raises(nilmdb.utils.interval.IntervalError):
+            list(diff2(a,b,True))
+        list(diff2(a,b,False))
+
+        # Empty second set
+        eq_(nilmdb.utils.interval.set_difference(a, IntervalSet()), a)
+
         # Empty second set
         eq_(nilmdb.utils.interval.set_difference(a, IntervalSet()), a)
 
@@ -62,6 +62,28 @@ class Base(object):
         eq_(self.foo.val, 20)
         eq_(self.foo.init_thread, self.foo.test_thread)
 
+class ListLike(object):
+    def __init__(self):
+        self.thread = threading.current_thread().name
+        self.foo = 0
+
+    def __iter__(self):
+        eq_(threading.current_thread().name, self.thread)
+        self.foo = 0
+        return self
+
+    def __getitem__(self, key):
+        eq_(threading.current_thread().name, self.thread)
+        return key
+
+    def next(self):
+        eq_(threading.current_thread().name, self.thread)
+        if self.foo < 5:
+            self.foo += 1
+            return self.foo
+        else:
+            raise StopIteration
+
 class TestUnserialized(Base):
     def setUp(self):
         self.foo = Foo()
@@ -84,3 +106,9 @@ class TestSerializer(Base):
         sp(sp(Foo("x"))).t()
         sp(sp(Foo)("x")).t()
         sp(sp(Foo))("x").t()
+
+    def test_iter(self):
+        sp = nilmdb.utils.serializer_proxy
+        i = sp(ListLike)()
+        eq_(list(i), [1,2,3,4,5])
+        eq_(i[3], 3)