2013-01-02 00:00:30 -05:00
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
2013-03-08 17:10:18 -05:00
|
|
|
|
import nilmdb.server
|
|
|
|
|
|
2012-12-31 15:52:28 -05:00
|
|
|
|
from nilmdb.utils.printf import *
|
2012-03-22 17:00:01 -04:00
|
|
|
|
import nilmdb.cmdline
|
2013-01-30 19:03:42 -05:00
|
|
|
|
from nilmdb.utils import datetime_tz
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
import unittest
|
2012-03-19 21:18:30 -04:00
|
|
|
|
from nose.tools import *
|
|
|
|
|
from nose.tools import assert_raises
|
2012-04-06 14:25:09 -04:00
|
|
|
|
import itertools
|
2012-03-19 21:18:30 -04:00
|
|
|
|
import os
|
2013-01-16 16:52:43 -05:00
|
|
|
|
import re
|
2012-03-19 21:18:30 -04:00
|
|
|
|
import sys
|
2013-01-03 17:00:23 -05:00
|
|
|
|
import StringIO
|
2012-03-19 21:18:30 -04:00
|
|
|
|
import shlex
|
2013-03-14 17:26:37 -04:00
|
|
|
|
import warnings
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2013-01-05 15:00:34 -05:00
|
|
|
|
from testutil.helpers import *
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2012-03-29 17:43:05 -04:00
|
|
|
|
testdb = "tests/cmdline-testdb"
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2013-03-18 18:22:45 -04:00
|
|
|
|
def server_start(max_results = None, max_removals = None, bulkdata_args = None):
    """Start the global NilmDB instance and its web server on port 32180.

    max_results / max_removals: optional per-request limits passed to the
      NilmDB layer (used by tests that exercise truncated responses).
    bulkdata_args: optional dict of extra arguments for the bulkdata layer.

    Sets the module globals test_server and test_db.
    """
    # NOTE: the default was previously a shared mutable dict ({}); use a
    # None sentinel so each call gets a fresh, independent dict.
    if bulkdata_args is None:
        bulkdata_args = {}
    global test_server, test_db
    # Start web app on a custom port
    test_db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(
        testdb,
        max_results = max_results,
        max_removals = max_removals,
        bulkdata_args = bulkdata_args)
    test_server = nilmdb.server.Server(test_db, host = "127.0.0.1",
                                       port = 32180, stoppable = False,
                                       fast_shutdown = True,
                                       force_traceback = False)
    test_server.start(blocking = False)
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2012-04-24 17:59:33 -04:00
|
|
|
|
def server_stop():
    """Stop the web server, then close the database it was serving."""
    global test_server, test_db
    # Close web app
    test_server.stop()
    test_db.close()
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2012-04-24 17:59:33 -04:00
|
|
|
|
def setup_module():
    """Module-level setup: wipe any leftover test database, then start
    the server that all tests in this module talk to."""
    global test_server, test_db
    # Clear out DB
    recursive_unlink(testdb)
    server_start()
|
|
|
|
|
|
|
|
|
|
def teardown_module():
    """Module-level teardown: shut down the server and database."""
    server_stop()
|
|
|
|
|
|
2013-01-03 17:00:23 -05:00
|
|
|
|
# Add an encoding property to StringIO so Python will convert Unicode
# properly when writing or reading.
class UTF8StringIO(StringIO.StringIO):
    """StringIO that advertises a UTF-8 encoding, so code that inspects
    sys.stdout.encoding (etc.) encodes Unicode text as UTF-8."""
    encoding = 'utf-8'
|
|
|
|
|
|
2012-04-04 14:54:24 -04:00
|
|
|
|
class TestCmdline(object):
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2012-04-06 14:25:09 -04:00
|
|
|
|
    def run(self, arg_string, infile=None, outfile=None):
        """Run a cmdline client with the specified argument string,
        passing the given input.  Stores the captured output in
        self.captured and the exit code in self.exitcode."""
        # printf("TZ=UTC ./nilmtool.py %s\n", arg_string)
        os.environ['NILMDB_URL'] = "http://localhost:32180/"
        # Context manager that temporarily swaps sys.stdin/stdout/stderr
        # for the given file objects, restoring them on exit.
        class stdio_wrapper:
            def __init__(self, stdin, stdout, stderr):
                self.io = (stdin, stdout, stderr)
            def __enter__(self):
                self.saved = ( sys.stdin, sys.stdout, sys.stderr )
                ( sys.stdin, sys.stdout, sys.stderr ) = self.io
            def __exit__(self, type, value, traceback):
                ( sys.stdin, sys.stdout, sys.stderr ) = self.saved
        # Empty input if none provided
        if infile is None:
            infile = UTF8StringIO("")
        # Capture stderr
        errfile = UTF8StringIO()
        if outfile is None:
            # If no output file, capture stdout with stderr
            outfile = errfile
        with stdio_wrapper(infile, outfile, errfile) as s:
            try:
                # shlex doesn't support Unicode very well.  Encode the
                # string as UTF-8 explicitly before splitting.
                args = shlex.split(arg_string.encode('utf-8'))
                nilmdb.cmdline.Cmdline(args).run()
                # Cmdline.run() returning normally counts as success.
                sys.exit(0)
            except SystemExit as e:
                exitcode = e.code
        captured = outfile.getvalue()
        self.captured = captured
        self.exitcode = exitcode
|
|
|
|
|
|
2012-04-06 14:25:09 -04:00
|
|
|
|
    def ok(self, arg_string, infile = None):
        """Run a command and require a zero exit code, dumping the
        captured output first if it failed."""
        self.run(arg_string, infile)
        if self.exitcode != 0:
            self.dump()
        eq_(self.exitcode, 0)
|
|
|
|
|
|
2013-01-16 16:52:43 -05:00
|
|
|
|
    def fail(self, arg_string, infile = None,
             exitcode = None, require_error = True):
        """Run a command and require that it failed.

        If exitcode is given, the exit code must match it exactly;
        otherwise any nonzero exit code is accepted.  Unless
        require_error is False, the output must also contain a line
        beginning with "error"."""
        self.run(arg_string, infile)
        if exitcode is not None and self.exitcode != exitcode:
            # Wrong exit code
            self.dump()
            eq_(self.exitcode, exitcode)
        if self.exitcode == 0:
            # Success, when we wanted failure
            self.dump()
            ne_(self.exitcode, 0)
        # Make sure the output contains the word "error" at the
        # beginning of a line, but only if an exitcode wasn't
        # specified.
        if require_error and not re.search("^error",
                                           self.captured, re.MULTILINE):
            raise AssertionError("command failed, but output doesn't "
                                 "contain the string 'error'")
|
2012-04-04 14:54:24 -04:00
|
|
|
|
|
2012-04-06 14:25:09 -04:00
|
|
|
|
    def contain(self, checkstring):
        """Assert that checkstring appears in the captured output."""
        in_(checkstring, self.captured)
|
2012-04-04 14:54:24 -04:00
|
|
|
|
|
2012-04-04 18:34:01 -04:00
|
|
|
|
    def match(self, checkstring):
        """Assert that the captured output equals checkstring exactly."""
        eq_(checkstring, self.captured)
|
2012-04-04 18:34:01 -04:00
|
|
|
|
|
2012-05-30 14:24:36 -04:00
|
|
|
|
    def matchfile(self, file):
        """Assert that the captured data exactly matches the contents of
        the given file, printing diagnostic context on mismatch."""
        # Captured data should match file contents exactly
        with open(file) as f:
            contents = f.read()
            if contents != self.captured:
                print "--- reference file (first 1000 bytes):\n"
                print contents[0:1000] + "\n"
                print "--- captured data (first 1000 bytes):\n"
                print self.captured[0:1000] + "\n"
                # Walk both strings in lockstep to report the first
                # byte offset at which they differ.
                zipped = itertools.izip_longest(contents, self.captured)
                for (n, (a, b)) in enumerate(zipped):
                    if a != b:
                        print "--- first difference is at offset", n
                        print "--- reference:", repr(a)
                        print "--- captured:", repr(b)
                        break
                raise AssertionError("captured data doesn't match " + file)
|
|
|
|
|
|
2012-06-25 14:52:50 -04:00
|
|
|
|
def matchfilecount(self, file):
|
|
|
|
|
# Last line of captured data should match the number of
|
|
|
|
|
# non-commented lines in file
|
|
|
|
|
count = 0
|
|
|
|
|
with open(file) as f:
|
|
|
|
|
for line in f:
|
|
|
|
|
if line[0] != '#':
|
|
|
|
|
count += 1
|
|
|
|
|
eq_(self.captured.splitlines()[-1], sprintf("%d", count))
|
|
|
|
|
|
2012-04-04 14:54:24 -04:00
|
|
|
|
    def dump(self):
        """Print the captured output, bracketed by markers, to aid
        debugging of failed assertions."""
        printf("-----dump start-----\n%s-----dump end-----\n", self.captured)
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_01_basic(self):
        """Basic argument handling: help output, bad options/commands,
        URL construction, and help-text consistency."""
        # help
        self.ok("--help")
        self.contain("usage:")

        # fail for no args
        self.fail("")

        # fail for no such option
        self.fail("--nosuchoption")

        # fail for bad command
        self.fail("badcommand")

        # try some URL constructions
        self.fail("--url http://nosuchurl/ info")
        self.contain("error connecting to server")

        self.fail("--url nosuchurl info")
        self.contain("error connecting to server")

        self.fail("-u nosuchurl/foo info")
        self.contain("error connecting to server")

        self.fail("-u localhost:1 info")
        self.contain("error connecting to server")

        self.ok("-u localhost:32180 info")
        self.ok("info")

        # Duplicated arguments should fail, but this isn't implemented
        # due to it being kind of a pain with argparse.
        if 0:
            self.fail("-u url1 -u url2 info")
            self.contain("duplicated argument")

            self.fail("list --detail --detail")
            self.contain("duplicated argument")

            self.fail("list --detail --path path1 --path path2")
            self.contain("duplicated argument")

            self.fail("extract --start 2000-01-01 --start 2001-01-02")
            self.contain("duplicated argument")

        # Verify that "help command" and "command --help" are identical
        # for all commands.
        self.fail("")
        # The usage message lists all subcommands inside {...}.
        m = re.search(r"{(.*)}", self.captured)
        for command in [""] + m.group(1).split(','):
            self.ok(command + " --help")
            cap1 = self.captured
            self.ok("help " + command)
            cap2 = self.captured
            # Extra arguments after "help <command>" should be ignored.
            self.ok("help " + command + " asdf --url --zxcv -")
            cap3 = self.captured
            eq_(cap1, cap2)
            eq_(cap2, cap3)
|
|
|
|
|
|
2013-02-28 17:09:26 -05:00
|
|
|
|
    def test_02_parsetime(self):
        """Exercise nilmdb.utils.time.parse_time on a variety of formats,
        embedded timestamps, special values, and invalid inputs."""
        os.environ['TZ'] = "America/New_York"
        test = datetime_tz.datetime_tz.now()
        u2ts = nilmdb.utils.time.unix_to_timestamp
        parse_time = nilmdb.utils.time.parse_time
        eq_(parse_time(str(test)), u2ts(test.totimestamp()))
        # Reference timestamp: 2012-04-05 14:00 -0400
        test = u2ts(datetime_tz.datetime_tz.smartparse("20120405 1400-0400").
                    totimestamp())
        # Timestamps embedded in surrounding text should still parse.
        eq_(parse_time("hi there 20120405 1400-0400 testing! 123"), test)
        eq_(parse_time("20120405 1800 UTC"), test)
        eq_(parse_time("20120405 1400-0400 UTC"), test)
        for badtime in [ "20120405 1400-9999", "hello", "-", "", "4:00" ]:
            with assert_raises(ValueError):
                x = parse_time(badtime)
        x = parse_time("now")
        # Timestamps embedded in typical filenames should parse too.
        eq_(parse_time("snapshot-20120405-140000.raw.gz"), test)
        eq_(parse_time("prep-20120405T1400"), test)
        # Numeric forms: Unix seconds, raw microsecond timestamps,
        # and the explicit "@raw" prefix.
        eq_(parse_time("1333648800.0"), test)
        eq_(parse_time("1333648800000000"), test)
        eq_(parse_time("@1333648800000000"), test)
        eq_(parse_time("min"), nilmdb.utils.time.min_timestamp)
        eq_(parse_time("max"), nilmdb.utils.time.max_timestamp)
        with assert_raises(ValueError):
            parse_time("@hashtag12345")
|
2013-02-28 17:09:26 -05:00
|
|
|
|
|
|
|
|
|
    def test_03_info(self):
        """Check the fields reported by the "info" command."""
        self.ok("info")
        self.contain("Server URL: http://localhost:32180/")
        self.contain("Client version: " + nilmdb.__version__)
        self.contain("Server version: " + test_server.version)
        self.contain("Server database path")
        self.contain("Server database size")
        self.contain("Server database free space")
|
2012-03-19 21:18:30 -04:00
|
|
|
|
|
2013-02-28 17:09:26 -05:00
|
|
|
|
    def test_04_createlist(self):
        """Stream creation and listing: path/layout validation, creation
        conflicts, and list filtering/wildcards."""
        # Basic stream tests, like those in test_client.

        # No streams
        self.ok("list")
        self.match("")

        # Bad paths
        self.fail("create foo/bar/baz float32_8")
        self.contain("paths must start with /")

        self.fail("create /foo float32_8")
        self.contain("invalid path")
        self.fail("create /newton/prep/ float32_8")
        self.contain("invalid path")

        # "_format" is a reserved path component.
        self.fail("create /newton/_format/prep float32_8")
        self.contain("path name is invalid")
        self.fail("create /_format/newton/prep float32_8")
        self.contain("path name is invalid")
        self.fail("create /newton/prep/_format float32_8")
        self.contain("path name is invalid")

        # Bad layout type
        self.fail("create /newton/prep NoSuchLayout")
        self.contain("no such layout")
        self.fail("create /newton/prep float32_0")
        self.contain("no such layout")
        self.fail("create /newton/prep float33_1")
        self.contain("no such layout")

        # Create a few streams
        self.ok("create /newton/zzz/rawnotch uint16_9")
        self.ok("create /newton/prep float32_8")
        self.ok("create /newton/raw uint16_6")

        # Create a stream that already exists
        self.fail("create /newton/raw uint16_6")
        self.contain("stream already exists at this path")

        # Should not be able to create a stream with another stream as
        # its parent
        self.fail("create /newton/prep/blah float32_8")
        self.contain("path is subdir of existing node")

        # Should not be able to create a stream at a location that
        # has other nodes as children
        self.fail("create /newton/zzz float32_8")
        self.contain("subdirs of this path already exist")

        # Verify we got those 3 streams and they're returned in
        # alphabetical order.
        self.ok("list -l")
        self.match("/newton/prep float32_8\n"
                   "/newton/raw uint16_6\n"
                   "/newton/zzz/rawnotch uint16_9\n")

        # Match just one type or one path.  Also check
        # that --path is optional
        self.ok("list --layout /newton/raw")
        self.match("/newton/raw uint16_6\n")

        # Wildcard matches
        self.ok("list *zzz*")
        self.match("/newton/zzz/rawnotch\n")

        # reversed range
        self.fail("list /newton/prep --start 2020-01-01 --end 2000-01-01")
        self.contain("start must precede end")
|
2013-01-06 19:25:07 -05:00
|
|
|
|
|
2013-02-28 17:09:26 -05:00
|
|
|
|
    def test_05_metadata(self):
        """Stream metadata: --set/--get/--update/--delete behavior,
        key=value parsing, and errors for nonexistent streams."""
        # Set / get metadata
        self.fail("metadata")
        self.fail("metadata --get")

        self.ok("metadata /newton/prep")
        self.match("")

        self.ok("metadata /newton/raw --get")
        self.match("")

        self.ok("metadata /newton/prep --set "
                 "'description=The Data' "
                 "v_scale=1.234")
        self.ok("metadata /newton/raw --update "
                 "'description=The Data'")
        self.ok("metadata /newton/raw --update "
                 "v_scale=1.234")

        # various parsing tests
        self.ok("metadata /newton/raw --update foo=")
        self.fail("metadata /newton/raw --update =bar")
        self.fail("metadata /newton/raw --update foo==bar")
        self.fail("metadata /newton/raw --update foo;bar")

        # errors
        self.fail("metadata /newton/nosuchstream foo=bar")
        self.contain("unrecognized arguments")
        self.fail("metadata /newton/nosuchstream")
        self.contain("No stream at path")
        self.fail("metadata /newton/nosuchstream --set foo=bar")
        self.contain("No stream at path")
        self.fail("metadata /newton/nosuchstream --delete")
        self.contain("No stream at path")

        # With no action flag, metadata defaults to a full --get.
        self.ok("metadata /newton/prep")
        self.match("description=The Data\nv_scale=1.234\n")

        self.ok("metadata /newton/prep --get")
        self.match("description=The Data\nv_scale=1.234\n")

        # Getting a nonexistent key reports it with an empty value.
        self.ok("metadata /newton/prep --get descr")
        self.match("descr=\n")

        self.ok("metadata /newton/prep --get description")
        self.match("description=The Data\n")

        self.ok("metadata /newton/prep --get description v_scale")
        self.match("description=The Data\nv_scale=1.234\n")

        # --set replaces all metadata, dropping keys not mentioned.
        self.ok("metadata /newton/prep --set "
                 "'description=The Data'")

        self.ok("metadata /newton/prep --get")
        self.match("description=The Data\n")

        self.fail("metadata /newton/nosuchpath")
        self.contain("No stream at path /newton/nosuchpath")

        # --delete with no keys removes all metadata.
        self.ok("metadata /newton/prep --delete")
        self.ok("metadata /newton/prep --get")
        self.match("")
        self.ok("metadata /newton/prep --set "
                 "'description=The Data' "
                 "v_scale=1.234")
        # --delete with a key removes just that key.
        self.ok("metadata /newton/prep --delete v_scale")
        self.ok("metadata /newton/prep --get")
        self.match("description=The Data\n")
        # Setting a key to an empty value removes it from the output.
        self.ok("metadata /newton/prep --set description=")
        self.ok("metadata /newton/prep --get")
        self.match("")
|
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_06_insert(self):
        """Data insertion: argument validation, pre-timestamped and
        auto-timestamped input, overlap detection, and format errors."""
        self.ok("insert --help")

        self.fail("insert -s 2000 -e 2001 /foo/bar baz")
        self.contain("error getting stream info")

        self.fail("insert -s 2000 -e 2001 /newton/prep baz")
        self.match("error opening input file baz\n")

        # -f derives the start time from the filename; no file given here.
        self.fail("insert /newton/prep --timestamp -f -r 120")
        self.contain("error extracting start time")

        self.fail("insert /newton/prep --timestamp -r 120")
        self.contain("need --start or --filename")

        self.fail("insert /newton/prep "
                  "tests/data/prep-20120323T1000")

        # insert pre-timestamped data, with bad times (non-monotonic)
        os.environ['TZ'] = "UTC"
        with open("tests/data/prep-20120323T1004-badtimes") as input:
            self.fail("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
                      input)
        self.contain("error parsing input data")
        self.contain("line 7")
        self.contain("timestamp is not monotonically increasing")

        # insert pre-timestamped data, from stdin
        os.environ['TZ'] = "UTC"
        with open("tests/data/prep-20120323T1004-timestamped") as input:
            self.ok("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
                    input)

        # insert data with normal timestamper from filename
        os.environ['TZ'] = "UTC"
        self.ok("insert --timestamp -f --rate 120 /newton/prep "
                "tests/data/prep-20120323T1000")
        self.fail("insert -t --filename /newton/prep "
                  "tests/data/prep-20120323T1002")
        self.contain("rate is needed")
        self.ok("insert -t --filename --rate 120 /newton/prep "
                "tests/data/prep-20120323T1002")

        # overlap
        os.environ['TZ'] = "UTC"
        self.fail("insert --timestamp -f --rate 120 /newton/prep "
                  "tests/data/prep-20120323T1004")
        self.contain("overlap")

        # Just to help test more situations -- stop and restart
        # the server now.  This tests nilmdb's interval caching,
        # at the very least.
        server_stop()
        server_start()

        # still an overlap if we specify a different start
        os.environ['TZ'] = "America/New_York"
        self.fail("insert -t -r 120 --start '03/23/2012 06:05:00' /newton/prep"
                  " tests/data/prep-20120323T1004")
        self.contain("overlap")

        # wrong format
        os.environ['TZ'] = "UTC"
        self.fail("insert -t -r 120 -f /newton/raw "
                  "tests/data/prep-20120323T1004")
        self.contain("error parsing input data")
        self.contain("can't parse value")

        # too few rows per line
        self.ok("create /insert/test float32_20")
        self.fail("insert -t -r 120 -f /insert/test "
                  "tests/data/prep-20120323T1004")
        self.contain("error parsing input data")
        self.contain("wrong number of values")
        self.ok("destroy /insert/test")

        # empty data does nothing
        self.ok("insert -t -r 120 --start '03/23/2012 06:05:00' /newton/prep "
                "/dev/null")

        # bad start time
        self.fail("insert -t -r 120 --start 'whatever' /newton/prep /dev/null")
|
2012-05-04 18:36:27 -04:00
|
|
|
|
|
2013-03-09 15:51:26 -05:00
|
|
|
|
    def test_07_detail_extended(self):
        """"list --detail" and "list --ext" output: interval filtering by
        start/end, raw-timestamp formatting, and extended info."""
        # Just count the number of lines, it's probably fine
        self.ok("list --detail")
        lines_(self.captured, 8)

        self.ok("list --detail *prep")
        lines_(self.captured, 4)

        self.ok("list --detail *prep --start='23 Mar 2012 10:02'")
        lines_(self.captured, 3)

        self.ok("list --detail *prep --start='23 Mar 2012 10:05'")
        lines_(self.captured, 2)

        self.ok("list --detail *prep --start='23 Mar 2012 10:05:15'")
        lines_(self.captured, 2)
        self.contain("10:05:15.000")

        self.ok("list --detail *prep --start='23 Mar 2012 10:05:15.50'")
        lines_(self.captured, 2)
        self.contain("10:05:15.500")

        # A start past all stored data reports no intervals.
        self.ok("list --detail *prep --start='23 Mar 2012 19:05:15.50'")
        lines_(self.captured, 2)
        self.contain("no intervals")

        self.ok("list --detail *prep --start='23 Mar 2012 10:05:15.50'"
                + " --end='23 Mar 2012 10:05:15.51'")
        lines_(self.captured, 2)
        self.contain("10:05:15.500")

        self.ok("list --detail")
        lines_(self.captured, 8)

        # Verify the "raw timestamp" output
        self.ok("list --detail *prep --timestamp-raw "
                "--start='23 Mar 2012 10:05:15.50'")
        lines_(self.captured, 2)
        self.contain("[ 1332497115500000 -> 1332497160000000 ]")

        # bad time
        self.fail("list --detail *prep -T --start='9332497115.612'")
        # good time
        self.ok("list --detail *prep -T --start='1332497115.612'")
        lines_(self.captured, 2)
        self.contain("[ 1332497115612000 -> 1332497160000000 ]")

        # Check --ext output
        self.ok("list --ext")
        lines_(self.captured, 9)

        self.ok("list -E -T")
        c = self.contain
        c("\n interval extents: 1332496800000000 -> 1332497160000000\n")
        c("\n total data: 43200 rows, 359.983336 seconds\n")
        c("\n interval extents: (no data)\n")
        c("\n total data: 0 rows, 0.000000 seconds\n")

        # Misc
        self.fail("list --ext --start='23 Mar 2012 10:05:15.50'")
        self.contain("--start and --end only make sense with --detail")
|
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_08_extract(self):
        """Data extraction: error cases, empty ranges, exact dumps
        compared against reference files, counts, and binary/markup
        output modes."""
        # nonexistent stream
        self.fail("extract /no/such/foo --start 2000-01-01 --end 2020-01-01")
        self.contain("error getting stream info")

        # reversed range
        self.fail("extract -a /newton/prep --start 2020-01-01 --end 2000-01-01")
        self.contain("start is after end")

        # empty ranges return error 2
        self.fail("extract -a /newton/prep " +
                  "--start '23 Mar 2012 20:00:30' " +
                  "--end '23 Mar 2012 20:00:31'",
                  exitcode = 2, require_error = False)
        self.contain("no data")
        self.fail("extract -a /newton/prep " +
                  "--start '23 Mar 2012 20:00:30.000001' " +
                  "--end '23 Mar 2012 20:00:30.000002'",
                  exitcode = 2, require_error = False)
        self.contain("no data")
        self.fail("extract -a /newton/prep " +
                  "--start '23 Mar 2022 10:00:30' " +
                  "--end '23 Mar 2022 10:00:31'",
                  exitcode = 2, require_error = False)
        self.contain("no data")

        # but are ok if we're just counting results
        self.ok("extract --count /newton/prep " +
                "--start '23 Mar 2012 20:00:30' " +
                "--end '23 Mar 2012 20:00:31'")
        self.match("0\n")
        self.ok("extract -c /newton/prep " +
                "--start '23 Mar 2012 20:00:30.000001' " +
                "--end '23 Mar 2012 20:00:30.000002'")
        self.match("0\n")

        # Check various dumps against stored copies of how they should appear
        def test(file, start, end, extra=""):
            # Both the extracted data and the reported count must agree
            # with the stored reference file tests/data/extract-<file>.
            self.ok("extract " + extra + " /newton/prep " +
                    "--start '23 Mar 2012 " + start + "' " +
                    "--end '23 Mar 2012 " + end + "'")
            self.matchfile("tests/data/extract-" + str(file))
            self.ok("extract --count " + extra + " /newton/prep " +
                    "--start '23 Mar 2012 " + start + "' " +
                    "--end '23 Mar 2012 " + end + "'")
            self.matchfilecount("tests/data/extract-" + str(file))
        test(1, "10:00:30", "10:00:31", extra="-a")
        test(1, "10:00:30.000000", "10:00:31", extra="-a")
        test(2, "10:00:30.000001", "10:00:31")
        test(2, "10:00:30.008333", "10:00:31")
        test(3, "10:00:30.008333", "10:00:30.008334")
        test(3, "10:00:30.008333", "10:00:30.016667")
        test(4, "10:00:30.008333", "10:00:30.025")
        test(5, "10:00:30", "10:00:31", extra="--annotate --bare")
        test(6, "10:00:30", "10:00:31", extra="-b")
        test(7, "10:00:30", "10:00:30.999", extra="-a -T")
        test(7, "10:00:30", "10:00:30.999", extra="-a --timestamp-raw")
        test(8, "10:01:59.9", "10:02:00.1", extra="--markup")
        test(8, "10:01:59.9", "10:02:00.1", extra="-m")

        # all data put in by tests
        self.ok("extract -a /newton/prep --start min --end max")
        lines_(self.captured, 43204)
        self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("43200\n")

        # test binary mode
        self.fail("extract -c -B /newton/prep -s min -e max")
        self.contain("binary cannot be combined")
        self.fail("extract -m -B /newton/prep -s min -e max")
        self.contain("binary cannot be combined")
        self.ok("extract -B /newton/prep -s min -e max")
        # 43200 rows of one 8-byte timestamp plus 8 4-byte floats.
        eq_(len(self.captured), 43200 * (8 + 8*4))

        # markup for 3 intervals, plus extra markup lines whenever we had
        # a "restart" from the nilmdb.stream_extract function
        self.ok("extract -m /newton/prep --start 2000-01-01 --end 2020-01-01")
        lines_(self.captured, 43210)
|
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_09_truncated(self):
        """Verify that the client transparently handles truncated
        (paginated) server responses by restarting the server with a
        tiny max_results limit and checking that "list --detail" still
        returns the full set of intervals."""
        # Test truncated responses by overriding the nilmdb max_results
        server_stop()
        server_start(max_results = 2)
        # 8 lines expected: same full output as with an unlimited
        # max_results, proving the client reassembled all pages.
        self.ok("list --detail")
        lines_(self.captured, 8)
        # Restore the default server configuration for later tests.
        server_stop()
        server_start()
|
2012-12-04 22:15:00 -05:00
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_10_remove(self):
        """Exercise the "remove" command: bad paths, empty/backward
        ranges, counted (-c/--count) removals, wildcard paths, and
        verification via "list --detail" that removed intervals are
        really gone."""
        # Removing data

        # Try nonexistent stream
        self.fail("remove /no/such/foo --start 2000-01-01 --end 2020-01-01")
        self.contain("no stream matched path")

        # empty or backward ranges return errors
        self.fail("remove /newton/prep --start 2020-01-01 --end 2000-01-01")
        self.contain("start must precede end")

        # A zero-length range (start == end) is also rejected, even at
        # microsecond resolution, and even for a future timestamp.
        self.fail("remove /newton/prep " +
                  "--start '23 Mar 2012 10:00:30' " +
                  "--end '23 Mar 2012 10:00:30'")
        self.contain("start must precede end")
        self.fail("remove /newton/prep " +
                  "--start '23 Mar 2012 10:00:30.000001' " +
                  "--end '23 Mar 2012 10:00:30.000001'")
        self.contain("start must precede end")
        self.fail("remove /newton/prep " +
                  "--start '23 Mar 2022 10:00:30' " +
                  "--end '23 Mar 2022 10:00:30'")
        self.contain("start must precede end")

        # Verbose
        # -c / --count print the number of removed rows; a range with
        # no data reports 0.
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2022 20:00:30' " +
                "--end '23 Mar 2022 20:00:31'")
        self.match("0\n")
        self.ok("remove --count /newton/prep " +
                "--start '23 Mar 2022 20:00:30' " +
                "--end '23 Mar 2022 20:00:31'")
        self.match("0\n")
        # With multiple path arguments (wildcard matches the same
        # stream again), each match is announced separately.
        self.ok("remove -c /newton/prep /newton/pre* " +
                "--start '23 Mar 2022 20:00:30' " +
                "--end '23 Mar 2022 20:00:31'")
        self.match("Removing from /newton/prep\n0\n" +
                   "Removing from /newton/prep\n0\n")

        # Make sure we have the data we expect
        self.ok("list -l --detail /newton/prep")
        self.match("/newton/prep float32_8\n" +
                   " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:02:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:03:59.991668 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:04:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:06:00.000000 +0000 ]\n")

        # Remove various chunks of prep data and make sure
        # they're gone.
        # Data rate is 120 rows/sec, so a 10 s range removes 1200 rows.
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:30' " +
                "--end '23 Mar 2012 10:00:40'")
        self.match("1200\n")

        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:10' " +
                "--end '23 Mar 2012 10:00:20'")
        self.match("1200\n")

        # This 20 s range overlaps the already-removed 10:00:10-10:00:20
        # chunk, so only the remaining 10 s of data (1200 rows) go away.
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:05' " +
                "--end '23 Mar 2012 10:00:25'")
        self.match("1200\n")

        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:03:50' " +
                "--end '23 Mar 2012 10:06:50'")
        self.match("15600\n")

        # 43200 rows originally minus 1200*3 minus 15600 = 24000 left.
        self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("24000\n")

        # See the missing chunks in list output
        self.ok("list --layout --detail /newton/prep")
        self.match("/newton/prep float32_8\n" +
                   " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:00:05.000000 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:00:25.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:00:30.000000 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:00:40.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:02:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:03:50.000000 +0000 ]\n")

        # Remove all data, verify it's missing
        self.ok("remove /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("") # no count requested this time
        self.ok("list -l --detail /newton/prep")
        self.match("/newton/prep float32_8\n" +
                   " (no intervals)\n")

        # Reinsert some data, to verify that no overlaps with deleted
        # data are reported
        for minute in ["0", "2"]:
            self.ok("insert --timestamp -f --rate 120 /newton/prep"
                    " tests/data/prep-20120323T100" + minute)
|
2013-01-08 21:07:52 -05:00
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_11_destroy(self):
        """Exercise the "destroy" command: argument errors, refusal to
        destroy streams that still hold intervals, -R (remove data and
        destroy), wildcard paths, and re-creating destroyed paths."""
        # Delete records
        self.ok("destroy --help")

        self.fail("destroy")
        self.contain("too few arguments")

        self.fail("destroy /no/such/stream")
        self.contain("no stream matched path")

        # -R (remove data first) still requires an existing stream.
        self.fail("destroy -R /no/such/stream")
        self.contain("no stream matched path")

        # A non-path argument is treated as a pattern that matches nothing.
        self.fail("destroy asdfasdf")
        self.contain("no stream matched path")

        # From previous tests, we have:
        self.ok("list -l")
        self.match("/newton/prep float32_8\n"
                   "/newton/raw uint16_6\n"
                   "/newton/zzz/rawnotch uint16_9\n")

        # Notice how they're not empty
        self.ok("list --detail")
        lines_(self.captured, 7)

        # Fail to destroy because intervals still present
        self.fail("destroy /newton/prep")
        self.contain("all intervals must be removed")
        # Nothing should have changed after the failed destroy.
        self.ok("list --detail")
        lines_(self.captured, 7)

        # Destroy for real
        # -R removes all data first; wildcard path matches /newton/prep.
        self.ok("destroy -R /n*/prep")
        self.ok("list -l")
        self.match("/newton/raw uint16_6\n"
                   "/newton/zzz/rawnotch uint16_9\n")

        self.ok("destroy /newton/zzz/rawnotch")
        self.ok("list -l")
        self.match("/newton/raw uint16_6\n")

        self.ok("destroy /newton/raw")
        self.ok("create /newton/raw uint16_6")
        # Specify --remove with no data
        self.ok("destroy --remove /newton/raw")
        self.ok("list")
        self.match("")

        # Re-create a previously deleted location, and some new ones
        rebuild = [ "/newton/prep", "/newton/zzz",
                    "/newton/raw", "/newton/asdf/qwer" ]
        for path in rebuild:
            # Create the path
            self.ok("create " + path + " float32_8")
            self.ok("list")
            self.contain(path)
            # Make sure it was created empty
            self.ok("list --detail " + path)
            self.contain("(no intervals)")
|
2013-01-02 00:00:30 -05:00
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_12_unicode(self):
        """Verify that stream paths and metadata keys/values may contain
        non-ASCII (Unicode) characters end-to-end through create, list,
        metadata set/update/get, and destroy."""
        # Unicode paths.
        # Clean out the streams left behind by test_11 first.
        self.ok("destroy /newton/asdf/qwer")
        self.ok("destroy /newton/prep /newton/raw")
        self.ok("destroy /newton/zzz")

        self.ok(u"create /düsseldorf/raw uint16_6")
        self.ok("list -l --detail")
        self.contain(u"/düsseldorf/raw uint16_6")
        self.contain("(no intervals)")

        # Unicode metadata
        # --set replaces all metadata; --update merges, so 'γ=δ' set
        # below must survive the later update of 'α'.
        self.ok(u"metadata /düsseldorf/raw --set α=beta 'γ=δ'")
        self.ok(u"metadata /düsseldorf/raw --update 'α=β ε τ α'")
        self.ok(u"metadata /düsseldorf/raw")
        self.match(u"α=β ε τ α\nγ=δ\n")

        self.ok(u"destroy /düsseldorf/raw")
|
|
|
|
|
|
2013-01-06 19:25:07 -05:00
|
|
|
|
    def test_13_files(self):
        """Verify BulkData's on-disk file splitting by forcing a tiny
        file_size, checking that many files are created, that the data
        survives a server restart with different parameters, and that a
        fresh copy of the same data uses far fewer files at defaults."""
        # Test BulkData's ability to split into multiple files,
        # by forcing the file size to be really small.
        server_stop()
        server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
                                       "files_per_dir" : 3 })

        # Fill data
        self.ok("create /newton/prep float32_8")
        os.environ['TZ'] = "UTC"
        # NOTE(review): "input" shadows the builtin; harmless in this scope.
        with open("tests/data/prep-20120323T1004-timestamped") as input:
            self.ok("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
                    input)

        # Extract it
        self.ok("extract /newton/prep --start '2000-01-01' " +
                "--end '2012-03-23 10:04:01'")
        lines_(self.captured, 120)
        self.ok("extract /newton/prep --start '2000-01-01' " +
                "--end '2022-03-23 10:04:01'")
        lines_(self.captured, 14400)

        # Make sure there were lots of files generated in the database
        # dir
        nfiles = 0
        for (dirpath, dirnames, filenames) in os.walk(testdb):
            nfiles += len(filenames)
        assert(nfiles > 500)

        # Make sure we can restart the server with a different file
        # size and have it still work
        server_stop()
        server_start()
        self.ok("extract /newton/prep --start '2000-01-01' " +
                "--end '2022-03-23 10:04:01'")
        lines_(self.captured, 14400)

        # Now recreate the data one more time and make sure there are
        # fewer files.
        self.ok("destroy --remove /newton/prep")
        self.fail("destroy /newton/prep") # already destroyed
        self.ok("create /newton/prep float32_8")
        os.environ['TZ'] = "UTC"
        with open("tests/data/prep-20120323T1004-timestamped") as input:
            self.ok("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
                    input)
        nfiles = 0
        for (dirpath, dirnames, filenames) in os.walk(testdb):
            nfiles += len(filenames)
        # Default file_size is much larger, so the same data should now
        # fit in a small number of files.
        lt_(nfiles, 50)
        self.ok("destroy -R /newton/prep") # destroy again
|
2013-01-09 17:37:37 -05:00
|
|
|
|
|
|
|
|
|
    def test_14_remove_files(self):
        """Comprehensive removal test with data split across many small
        bulkdata files (file_size forced small) and a limited
        max_removals, verifying row counts, interval listings, disk
        usage shrinkage, and reinsertion after removal near the end."""
        # Test BulkData's ability to remove when data is split into
        # multiple files.  Should be a fairly comprehensive test of
        # remove functionality.
        # Also limit max_removals, to cover more functionality.
        server_stop()
        server_start(max_removals = 4321,
                     bulkdata_args = { "file_size" : 920, # 23 rows per file
                                       "files_per_dir" : 3 })

        # Insert data.  Just for fun, insert out of order
        self.ok("create /newton/prep float32_8")
        os.environ['TZ'] = "UTC"
        self.ok("insert -t --filename --rate 120 /newton/prep "
                "tests/data/prep-20120323T1002")
        self.ok("insert -t --filename --rate 120 /newton/prep "
                "tests/data/prep-20120323T1000")

        # Should take up about 2.8 MB here (including directory entries)
        du_before = nilmdb.utils.diskusage.du(testdb)

        # Make sure we have the data we expect
        self.ok("list -l --detail")
        self.match("/newton/prep float32_8\n" +
                   " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:02:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:03:59.991668 +0000 ]\n")

        # Remove various chunks of prep data and make sure
        # they're gone.
        # Two 2-minute files at 120 rows/sec = 28800 rows total.
        self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("28800\n")

        # 180 s * 120 rows/sec = 21600 rows removed.
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:30' " +
                "--end '23 Mar 2012 10:03:30'")
        self.match("21600\n")

        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:10' " +
                "--end '23 Mar 2012 10:00:20'")
        self.match("1200\n")

        # Overlaps the already-removed 10:00:10-10:00:20 chunk, so only
        # the remaining 10 s (1200 rows) are removed.
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:05' " +
                "--end '23 Mar 2012 10:00:25'")
        self.match("1200\n")

        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:03:50' " +
                "--end '23 Mar 2012 10:06:50'")
        self.match("1200\n")

        # 28800 - 21600 - 1200*3 = 3600 rows remain.
        self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("3600\n")

        # See the missing chunks in list output
        self.ok("list -l --detail")
        self.match("/newton/prep float32_8\n" +
                   " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:00:05.000000 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:00:25.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:00:30.000000 +0000 ]\n"
                   " [ Fri, 23 Mar 2012 10:03:30.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:03:50.000000 +0000 ]\n")

        # We have 1/8 of the data that we had before, so the file size
        # should have dropped below 1/4 of what it used to be
        du_after = nilmdb.utils.diskusage.du(testdb)
        lt_(du_after, (du_before / 4))

        # Remove anything that came from the 10:02 data file
        self.ok("remove /newton/prep " +
                "--start '23 Mar 2012 10:02:00' --end '2020-01-01'")

        # Re-insert 19 lines from that file, then remove them again.
        # With the specific file_size above, this will cause the last
        # file in the bulk data storage to be exactly file_size large,
        # so removing the data should also remove that last file.
        self.ok("insert --timestamp -f --rate 120 /newton/prep " +
                "tests/data/prep-20120323T1002-first19lines")
        self.ok("remove /newton/prep " +
                "--start '23 Mar 2012 10:02:00' --end '2020-01-01'")

        # Shut down and restart server, to force nrows to get refreshed.
        server_stop()
        server_start()

        # Re-add the full 10:02 data file.  This tests adding new data once
        # we removed data near the end.
        self.ok("insert -t -f -r 120 /newton/prep "
                "tests/data/prep-20120323T1002")

        # See if we can extract it all
        # 3600 remaining + 14400 rows from the 10:02 file - 2400 rows
        # that fall in removed ranges... expected total is 15600 lines.
        self.ok("extract /newton/prep --start 2000-01-01 --end 2020-01-01")
        lines_(self.captured, 15600)
|
2013-03-11 17:07:26 -04:00
|
|
|
|
|
|
|
|
|
    def test_15_intervals_diff(self):
        """Exercise the "intervals" command, both plain listing and the
        --diff/-d form that reports intervals present in the first
        stream but missing from the second."""
        # Test "intervals" and "intervals --diff" command.
        os.environ['TZ'] = "UTC"

        # Empty stream: every form of the query returns nothing.
        self.ok("create /diff/1 uint8_1")
        self.match("")
        self.ok("intervals /diff/1")
        self.match("")
        self.ok("intervals /diff/1 --diff /diff/1")
        self.match("")
        self.ok("intervals --diff /diff/1 /diff/1")
        self.match("")
        # Either path not existing is an error.
        self.fail("intervals /diff/2")
        self.fail("intervals /diff/1 -d /diff/2")

        self.ok("create /diff/2 uint8_1")
        self.ok("intervals -T /diff/1 -d /diff/2")
        self.match("")
        # Inserting from /dev/null creates an interval with no rows.
        self.ok("insert -s 01-01-2000 -e 01-01-2001 /diff/1 /dev/null")

        self.ok("intervals /diff/1")
        self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
                   "> Mon, 01 Jan 2001 00:00:00.000000 +0000 ]\n")

        # /diff/2 is still empty, so the diff is the whole interval.
        self.ok("intervals /diff/1 -d /diff/2")
        self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
                   "> Mon, 01 Jan 2001 00:00:00.000000 +0000 ]\n")

        # Matching interval in /diff/2 cancels it out of the diff.
        self.ok("insert -s 01-01-2000 -e 01-01-2001 /diff/2 /dev/null")
        self.ok("intervals /diff/1 -d /diff/2")
        self.match("")

        # Non-overlapping additions: only /diff/1's 2001-2002 interval
        # is missing from /diff/2.
        self.ok("insert -s 01-01-2001 -e 01-01-2002 /diff/1 /dev/null")
        self.ok("insert -s 01-01-2002 -e 01-01-2003 /diff/2 /dev/null")
        self.ok("intervals /diff/1 -d /diff/2")
        self.match("[ Mon, 01 Jan 2001 00:00:00.000000 +0000 -"
                   "> Tue, 01 Jan 2002 00:00:00.000000 +0000 ]\n")

        self.ok("insert -s 01-01-2004 -e 01-01-2005 /diff/1 /dev/null")
        self.ok("intervals /diff/1 -d /diff/2")
        self.match("[ Mon, 01 Jan 2001 00:00:00.000000 +0000 -"
                   "> Tue, 01 Jan 2002 00:00:00.000000 +0000 ]\n"
                   "[ Thu, 01 Jan 2004 00:00:00.000000 +0000 -"
                   "> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")

        # Backward start/end range is an error; a valid range restricts
        # the diff output to intervals inside it.
        self.fail("intervals -s 01-01-2003 -e 01-01-2000 /diff/1 -d /diff/2")
        self.ok("intervals -s 01-01-2003 -e 01-01-2008 /diff/1 -d /diff/2")
        self.match("[ Thu, 01 Jan 2004 00:00:00.000000 +0000 -"
                   "> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")

        # Clean up.
        self.ok("destroy -R /diff/1")
        self.ok("destroy -R /diff/2")
|
2013-03-13 19:21:18 -04:00
|
|
|
|
|
|
|
|
|
    def test_16_rename(self):
        """Exercise the "rename" command: error cases (same path, bad
        destinations, conflicts with existing streams/subdirs), a chain
        of successful renames verified both on disk and via "list", and
        cleanup of temporary rename directories."""
        # Test renaming.  Force file size smaller so we get more files
        server_stop()
        recursive_unlink(testdb)
        server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
                                       "files_per_dir" : 3 })

        # Fill data
        self.ok("create /newton/prep float32_8")
        os.environ['TZ'] = "UTC"
        # NOTE(review): "input" shadows the builtin; harmless in this scope.
        with open("tests/data/prep-20120323T1004-timestamped") as input:
            self.ok("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
                    input)

        # Extract it
        # Saved output is compared after the rename chain to prove the
        # data survived unmodified.
        self.ok("extract /newton/prep --start '2000-01-01' " +
                "--end '2012-03-23 10:04:01'")
        extract_before = self.captured

        def check_path(*components):
            """Assert that the stream's data lives at exactly the given
            path components on disk, and that "list" agrees."""
            # Verify the paths look right on disk
            seek = os.path.join(testdb, "data", *components)
            # Data dirs are identified by a "_format" file; exactly one
            # must exist, and it must be at the expected location.
            for (dirpath, dirnames, filenames) in os.walk(testdb):
                if "_format" in filenames:
                    if dirpath == seek:
                        break
                    raise AssertionError("data also found at " + dirpath)
            else:
                # for/else: loop completed without break, so no data
                # directory was found anywhere.
                raise AssertionError("data not found at " + seek)
            # Verify "list" output
            self.ok("list -l")
            self.match("/" + "/".join(components) + " float32_8\n")

        # Lots of renames
        check_path("newton", "prep")

        self.fail("rename /newton/prep /newton/prep")
        self.contain("old and new paths are the same")
        check_path("newton", "prep")
        self.fail("rename /newton/prep /newton")
        self.contain("path must contain at least one folder")
        self.fail("rename /newton/prep /newton/prep/")
        self.contain("invalid path")
        self.ok("rename /newton/prep /newton/foo/1")
        check_path("newton", "foo", "1")
        self.ok("rename /newton/foo/1 /newton/foo")
        check_path("newton", "foo")
        self.ok("rename /newton/foo /totally/different/thing")
        check_path("totally", "different", "thing")
        self.ok("rename /totally/different/thing /totally/something")
        check_path("totally", "something")
        self.ok("rename /totally/something /totally/something/cool")
        check_path("totally", "something", "cool")
        self.ok("rename /totally/something/cool /foo/bar")
        check_path("foo", "bar")
        # Conflicts with an existing stream or its parent/child paths
        # must all be rejected.
        self.ok("create /xxx/yyy/zzz float32_8")
        self.fail("rename /foo/bar /xxx/yyy")
        self.contain("subdirs of this path already exist")
        self.fail("rename /foo/bar /xxx/yyy/zzz")
        self.contain("stream already exists at this path")
        self.fail("rename /foo/bar /xxx/yyy/zzz/www")
        self.contain("path is subdir of existing node")
        self.ok("rename /foo/bar /xxx/yyy/mmm")
        self.ok("destroy -R /xxx/yyy/zzz")
        check_path("xxx", "yyy", "mmm")

        # Extract it at the final path
        self.ok("extract /xxx/yyy/mmm --start '2000-01-01' " +
                "--end '2012-03-23 10:04:01'")
        eq_(self.captured, extract_before)

        self.ok("destroy -R /xxx/yyy/mmm")

        # Make sure temporary rename dirs weren't left around
        for (dirpath, dirnames, filenames) in os.walk(testdb):
            if "rename-" in dirpath:
                raise AssertionError("temporary directories not cleaned up")
            if "totally" in dirpath or "newton" in dirpath:
                raise AssertionError("old directories not cleaned up")

        # Restore the default server configuration.
        server_stop()
        server_start()
|