You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 

847 lines
31 KiB

  1. # -*- coding: utf-8 -*-
  2. import nilmdb
  3. from nilmdb.utils.printf import *
  4. import nilmdb.cmdline
  5. from nilmdb.utils import datetime_tz
  6. import unittest
  7. from nose.tools import *
  8. from nose.tools import assert_raises
  9. import itertools
  10. import os
  11. import re
  12. import shutil
  13. import sys
  14. import threading
  15. import urllib2
  16. from urllib2 import urlopen, HTTPError
  17. import Queue
  18. import StringIO
  19. import shlex
  20. from testutil.helpers import *
  21. testdb = "tests/cmdline-testdb"
  22. def server_start(max_results = None, bulkdata_args = {}):
  23. global test_server, test_db
  24. # Start web app on a custom port
  25. test_db = nilmdb.NilmDB(testdb, sync = False,
  26. max_results = max_results,
  27. bulkdata_args = bulkdata_args)
  28. test_server = nilmdb.Server(test_db, host = "127.0.0.1",
  29. port = 12380, stoppable = False,
  30. fast_shutdown = True,
  31. force_traceback = False)
  32. test_server.start(blocking = False)
def server_stop():
    """Stop the web server, then close the database it was serving."""
    global test_server, test_db
    # Close web app
    test_server.stop()
    test_db.close()
def setup_module():
    """nose module-level setup: wipe any stale test DB, start a server."""
    global test_server, test_db
    # Clear out DB
    recursive_unlink(testdb)
    server_start()
def teardown_module():
    """nose module-level teardown: stop the server started in setup."""
    server_stop()
# Add an encoding property to StringIO so Python will convert Unicode
# properly when writing or reading.
class UTF8StringIO(StringIO.StringIO):
    # Mimic a UTF-8 terminal so print/stdin conversion works.
    encoding = 'utf-8'
  49. class TestCmdline(object):
  50. def run(self, arg_string, infile=None, outfile=None):
  51. """Run a cmdline client with the specified argument string,
  52. passing the given input. Returns a tuple with the output and
  53. exit code"""
  54. # printf("TZ=UTC ./nilmtool.py %s\n", arg_string)
  55. class stdio_wrapper:
  56. def __init__(self, stdin, stdout, stderr):
  57. self.io = (stdin, stdout, stderr)
  58. def __enter__(self):
  59. self.saved = ( sys.stdin, sys.stdout, sys.stderr )
  60. ( sys.stdin, sys.stdout, sys.stderr ) = self.io
  61. def __exit__(self, type, value, traceback):
  62. ( sys.stdin, sys.stdout, sys.stderr ) = self.saved
  63. # Empty input if none provided
  64. if infile is None:
  65. infile = UTF8StringIO("")
  66. # Capture stderr
  67. errfile = UTF8StringIO()
  68. if outfile is None:
  69. # If no output file, capture stdout with stderr
  70. outfile = errfile
  71. with stdio_wrapper(infile, outfile, errfile) as s:
  72. try:
  73. # shlex doesn't support Unicode very well. Encode the
  74. # string as UTF-8 explicitly before splitting.
  75. args = shlex.split(arg_string.encode('utf-8'))
  76. nilmdb.cmdline.Cmdline(args).run()
  77. sys.exit(0)
  78. except SystemExit as e:
  79. exitcode = e.code
  80. captured = outfile.getvalue()
  81. self.captured = captured
  82. self.exitcode = exitcode
  83. def ok(self, arg_string, infile = None):
  84. self.run(arg_string, infile)
  85. if self.exitcode != 0:
  86. self.dump()
  87. eq_(self.exitcode, 0)
  88. def fail(self, arg_string, infile = None,
  89. exitcode = None, require_error = True):
  90. self.run(arg_string, infile)
  91. if exitcode is not None and self.exitcode != exitcode:
  92. # Wrong exit code
  93. self.dump()
  94. eq_(self.exitcode, exitcode)
  95. if self.exitcode == 0:
  96. # Success, when we wanted failure
  97. self.dump()
  98. ne_(self.exitcode, 0)
  99. # Make sure the output contains the word "error" at the
  100. # beginning of a line, but only if an exitcode wasn't
  101. # specified.
  102. if require_error and not re.search("^error",
  103. self.captured, re.MULTILINE):
  104. raise AssertionError("command failed, but output doesn't "
  105. "contain the string 'error'")
    def contain(self, checkstring):
        """Assert that the captured output contains 'checkstring'."""
        in_(checkstring, self.captured)
    def match(self, checkstring):
        """Assert that the captured output equals 'checkstring' exactly."""
        eq_(checkstring, self.captured)
  110. def matchfile(self, file):
  111. # Captured data should match file contents exactly
  112. with open(file) as f:
  113. contents = f.read()
  114. if contents != self.captured:
  115. print contents[1:1000] + "\n"
  116. print self.captured[1:1000] + "\n"
  117. raise AssertionError("captured data doesn't match " + file)
  118. def matchfilecount(self, file):
  119. # Last line of captured data should match the number of
  120. # non-commented lines in file
  121. count = 0
  122. with open(file) as f:
  123. for line in f:
  124. if line[0] != '#':
  125. count += 1
  126. eq_(self.captured.splitlines()[-1], sprintf("%d", count))
    def dump(self):
        """Print the captured output, framed for easy reading."""
        printf("-----dump start-----\n%s-----dump end-----\n", self.captured)
    def test_01_basic(self):
        """Exercise help output, bad arguments, and URL construction."""
        # help
        self.ok("--help")
        self.contain("usage:")
        # fail for no args
        self.fail("")
        # fail for no such option
        self.fail("--nosuchoption")
        # fail for bad command
        self.fail("badcommand")
        # try some URL constructions
        self.fail("--url http://nosuchurl/ info")
        self.contain("Couldn't resolve host 'nosuchurl'")
        self.fail("--url nosuchurl info")
        self.contain("Couldn't resolve host 'nosuchurl'")
        self.fail("-u nosuchurl/foo info")
        self.contain("Couldn't resolve host 'nosuchurl'")
        self.fail("-u localhost:0 info")
        self.contain("couldn't connect to host")
        self.ok("-u localhost:12380 info")
        self.ok("info")
        # Duplicated arguments should fail, but this isn't implemented
        # due to it being kind of a pain with argparse.
        if 0:
            self.fail("-u url1 -u url2 info")
            self.contain("duplicated argument")
            self.fail("list --detail --detail")
            self.contain("duplicated argument")
            self.fail("list --detail --path path1 --path path2")
            self.contain("duplicated argument")
            self.fail("extract --start 2000-01-01 --start 2001-01-02")
            self.contain("duplicated argument")
    def test_02_info(self):
        """Check the fields reported by the 'info' command."""
        self.ok("info")
        self.contain("Server URL: http://localhost:12380/")
        self.contain("Client version: " + nilmdb.__version__)
        self.contain("Server version: " + test_server.version)
        self.contain("Server database path")
        self.contain("Server database size")
        self.contain("Server database free space")
    def test_03_createlist(self):
        """Create streams and verify 'list' output, filters, wildcards."""
        # Basic stream tests, like those in test_client.
        # No streams
        self.ok("list")
        self.match("")
        # Bad paths
        self.fail("create foo/bar/baz PrepData")
        self.contain("paths must start with /")
        self.fail("create /foo PrepData")
        self.contain("invalid path")
        # Bad layout type
        self.fail("create /newton/prep NoSuchLayout")
        self.contain("no such layout")
        self.fail("create /newton/prep float32_0")
        self.contain("no such layout")
        self.fail("create /newton/prep float33_1")
        self.contain("no such layout")
        # Create a few streams
        self.ok("create /newton/zzz/rawnotch RawNotchedData")
        self.ok("create /newton/prep PrepData")
        self.ok("create /newton/raw RawData")
        # Should not be able to create a stream with another stream as
        # its parent
        self.fail("create /newton/prep/blah PrepData")
        self.contain("path is subdir of existing node")
        # Should not be able to create a stream at a location that
        # has other nodes as children
        self.fail("create /newton/zzz PrepData")
        self.contain("subdirs of this path already exist")
        # Verify we got those 3 streams and they're returned in
        # alphabetical order.
        self.ok("list")
        self.match("/newton/prep PrepData\n"
                   "/newton/raw RawData\n"
                   "/newton/zzz/rawnotch RawNotchedData\n")
        # Match just one type or one path.  Also check
        # that --path is optional
        self.ok("list --path /newton/raw")
        self.match("/newton/raw RawData\n")
        self.ok("list /newton/raw")
        self.match("/newton/raw RawData\n")
        self.fail("list -p /newton/raw /newton/raw")
        self.contain("too many paths")
        self.ok("list --layout RawData")
        self.match("/newton/raw RawData\n")
        # Wildcard matches
        self.ok("list --layout Raw*")
        self.match("/newton/raw RawData\n"
                   "/newton/zzz/rawnotch RawNotchedData\n")
        self.ok("list --path *zzz* --layout Raw*")
        self.match("/newton/zzz/rawnotch RawNotchedData\n")
        self.ok("list *zzz* --layout Raw*")
        self.match("/newton/zzz/rawnotch RawNotchedData\n")
        self.ok("list --path *zzz* --layout Prep*")
        self.match("")
        # reversed range
        self.fail("list /newton/prep --start 2020-01-01 --end 2000-01-01")
        self.contain("start must precede end")
    def test_04_metadata(self):
        """Set, get, and update stream metadata; check parse errors."""
        # Set / get metadata
        self.fail("metadata")
        self.fail("metadata --get")
        self.ok("metadata /newton/prep")
        self.match("")
        self.ok("metadata /newton/raw --get")
        self.match("")
        self.ok("metadata /newton/prep --set "
                "'description=The Data' "
                "v_scale=1.234")
        self.ok("metadata /newton/raw --update "
                "'description=The Data'")
        self.ok("metadata /newton/raw --update "
                "v_scale=1.234")
        # various parsing tests
        self.ok("metadata /newton/raw --update foo=")
        self.fail("metadata /newton/raw --update =bar")
        self.fail("metadata /newton/raw --update foo==bar")
        self.fail("metadata /newton/raw --update foo;bar")
        # errors
        self.fail("metadata /newton/nosuchstream foo=bar")
        self.contain("unrecognized arguments")
        self.fail("metadata /newton/nosuchstream")
        self.contain("No stream at path")
        self.fail("metadata /newton/nosuchstream --set foo=bar")
        self.contain("No stream at path")
        self.ok("metadata /newton/prep")
        self.match("description=The Data\nv_scale=1.234\n")
        self.ok("metadata /newton/prep --get")
        self.match("description=The Data\nv_scale=1.234\n")
        self.ok("metadata /newton/prep --get descr")
        self.match("descr=\n")
        self.ok("metadata /newton/prep --get description")
        self.match("description=The Data\n")
        self.ok("metadata /newton/prep --get description v_scale")
        self.match("description=The Data\nv_scale=1.234\n")
        self.ok("metadata /newton/prep --set "
                "'description=The Data'")
        self.ok("metadata /newton/prep --get")
        self.match("description=The Data\n")
        self.fail("metadata /newton/nosuchpath")
        self.contain("No stream at path /newton/nosuchpath")
    def test_05_parsetime(self):
        """Test Cmdline.parse_time with a variety of input formats."""
        os.environ['TZ'] = "America/New_York"
        cmd = nilmdb.cmdline.Cmdline(None)
        test = datetime_tz.datetime_tz.now()
        eq_(cmd.parse_time(str(test)), test)
        test = datetime_tz.datetime_tz.smartparse("20120405 1400-0400")
        eq_(cmd.parse_time("hi there 20120405 1400-0400 testing! 123"), test)
        eq_(cmd.parse_time("20120405 1800 UTC"), test)
        eq_(cmd.parse_time("20120405 1400-0400 UTC"), test)
        # Inputs that should be rejected outright
        for badtime in [ "20120405 1400-9999", "hello", "-", "", "4:00" ]:
            with assert_raises(ValueError):
                x = cmd.parse_time(badtime)
        x = cmd.parse_time("now")
        # Timestamps embedded in typical data filenames
        eq_(cmd.parse_time("snapshot-20120405-140000.raw.gz"), test)
        eq_(cmd.parse_time("prep-20120405T1400"), test)
    def test_06_insert(self):
        """Insert data from files and stdin; check timestamping,
        overlap detection, and error handling."""
        self.ok("insert --help")
        self.fail("insert /foo/bar baz qwer")
        self.contain("error getting stream info")
        self.fail("insert /newton/prep baz qwer")
        self.match("error opening input file baz\n")
        self.fail("insert /newton/prep")
        self.contain("error extracting time")
        self.fail("insert --start 19801205 /newton/prep 1 2 3 4")
        self.contain("--start can only be used with one input file")
        self.fail("insert /newton/prep "
                  "tests/data/prep-20120323T1000")
        # insert pre-timestamped data, from stdin
        os.environ['TZ'] = "UTC"
        with open("tests/data/prep-20120323T1004-timestamped") as input:
            self.ok("insert --none /newton/prep", input)
        # insert pre-timestamped data, with bad times (non-monotonic)
        os.environ['TZ'] = "UTC"
        with open("tests/data/prep-20120323T1004-badtimes") as input:
            self.fail("insert --none /newton/prep", input)
        self.contain("error parsing input data")
        self.contain("line 7:")
        self.contain("timestamp is not monotonically increasing")
        # insert data with normal timestamper from filename
        os.environ['TZ'] = "UTC"
        self.ok("insert --rate 120 /newton/prep "
                "tests/data/prep-20120323T1000 "
                "tests/data/prep-20120323T1002")
        # overlap
        os.environ['TZ'] = "UTC"
        self.fail("insert --rate 120 /newton/prep "
                  "tests/data/prep-20120323T1004")
        self.contain("overlap")
        # Just to help test more situations -- stop and restart
        # the server now.  This tests nilmdb's interval caching,
        # at the very least.
        server_stop()
        server_start()
        # still an overlap if we specify a different start
        os.environ['TZ'] = "America/New_York"
        self.fail("insert --rate 120 --start '03/23/2012 06:05:00' /newton/prep"
                  " tests/data/prep-20120323T1004")
        self.contain("overlap")
        # wrong format
        os.environ['TZ'] = "UTC"
        self.fail("insert --rate 120 /newton/raw "
                  "tests/data/prep-20120323T1004")
        self.contain("error parsing input data")
        # empty data does nothing
        self.ok("insert --rate 120 --start '03/23/2012 06:05:00' /newton/prep "
                "/dev/null")
        # bad start time
        self.fail("insert --rate 120 --start 'whatever' /newton/prep /dev/null")
    def test_07_detail(self):
        """Check 'list --detail' interval output, including raw
        timestamp formatting."""
        # Just count the number of lines, it's probably fine
        self.ok("list --detail")
        lines_(self.captured, 8)
        self.ok("list --detail --path *prep")
        lines_(self.captured, 4)
        self.ok("list --detail --path *prep --start='23 Mar 2012 10:02'")
        lines_(self.captured, 3)
        self.ok("list --detail --path *prep --start='23 Mar 2012 10:05'")
        lines_(self.captured, 2)
        self.ok("list --detail --path *prep --start='23 Mar 2012 10:05:15'")
        lines_(self.captured, 2)
        self.contain("10:05:15.000")
        self.ok("list --detail --path *prep --start='23 Mar 2012 10:05:15.50'")
        lines_(self.captured, 2)
        self.contain("10:05:15.500")
        self.ok("list --detail --path *prep --start='23 Mar 2012 19:05:15.50'")
        lines_(self.captured, 2)
        self.contain("no intervals")
        self.ok("list --detail --path *prep --start='23 Mar 2012 10:05:15.50'"
                + " --end='23 Mar 2012 10:05:15.51'")
        lines_(self.captured, 2)
        self.contain("10:05:15.500")
        self.ok("list --detail")
        lines_(self.captured, 8)
        # Verify the "raw timestamp" output
        self.ok("list --detail --path *prep --timestamp-raw "
                "--start='23 Mar 2012 10:05:15.50'")
        lines_(self.captured, 2)
        self.contain("[ 1332497115.5 -> 1332497159.991668 ]")
        self.ok("list --detail --path *prep -T "
                "--start='23 Mar 2012 10:05:15.612'")
        lines_(self.captured, 2)
        self.contain("[ 1332497115.612 -> 1332497159.991668 ]")
    def test_08_extract(self):
        """Extract ranges of data and compare against stored dump files."""
        # nonexistent stream
        self.fail("extract /no/such/foo --start 2000-01-01 --end 2020-01-01")
        self.contain("error getting stream info")
        # reversed range
        self.fail("extract -a /newton/prep --start 2020-01-01 --end 2000-01-01")
        self.contain("start is after end")
        # empty ranges return error 2
        self.fail("extract -a /newton/prep " +
                  "--start '23 Mar 2012 20:00:30' " +
                  "--end '23 Mar 2012 20:00:31'",
                  exitcode = 2, require_error = False)
        self.contain("no data")
        self.fail("extract -a /newton/prep " +
                  "--start '23 Mar 2012 20:00:30.000001' " +
                  "--end '23 Mar 2012 20:00:30.000002'",
                  exitcode = 2, require_error = False)
        self.contain("no data")
        self.fail("extract -a /newton/prep " +
                  "--start '23 Mar 2022 10:00:30' " +
                  "--end '23 Mar 2022 10:00:31'",
                  exitcode = 2, require_error = False)
        self.contain("no data")
        # but are ok if we're just counting results
        self.ok("extract --count /newton/prep " +
                "--start '23 Mar 2012 20:00:30' " +
                "--end '23 Mar 2012 20:00:31'")
        self.match("0\n")
        self.ok("extract -c /newton/prep " +
                "--start '23 Mar 2012 20:00:30.000001' " +
                "--end '23 Mar 2012 20:00:30.000002'")
        self.match("0\n")
        # Check various dumps against stored copies of how they should appear
        def test(file, start, end, extra=""):
            # Extract the range and diff against the stored dump file,
            # then count rows and compare to the file's line count.
            self.ok("extract " + extra + " /newton/prep " +
                    "--start '23 Mar 2012 " + start + "' " +
                    "--end '23 Mar 2012 " + end + "'")
            self.matchfile("tests/data/extract-" + str(file))
            self.ok("extract --count " + extra + " /newton/prep " +
                    "--start '23 Mar 2012 " + start + "' " +
                    "--end '23 Mar 2012 " + end + "'")
            self.matchfilecount("tests/data/extract-" + str(file))
        test(1, "10:00:30", "10:00:31", extra="-a")
        test(1, "10:00:30.000000", "10:00:31", extra="-a")
        test(2, "10:00:30.000001", "10:00:31")
        test(2, "10:00:30.008333", "10:00:31")
        test(3, "10:00:30.008333", "10:00:30.008334")
        test(3, "10:00:30.008333", "10:00:30.016667")
        test(4, "10:00:30.008333", "10:00:30.025")
        test(5, "10:00:30", "10:00:31", extra="--annotate --bare")
        test(6, "10:00:30", "10:00:31", extra="-b")
        test(7, "10:00:30", "10:00:30.999", extra="-a -T")
        test(7, "10:00:30", "10:00:30.999", extra="-a --timestamp-raw")
        # all data put in by tests
        self.ok("extract -a /newton/prep --start 2000-01-01 --end 2020-01-01")
        lines_(self.captured, 43204)
        self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("43200\n")
    def test_09_truncated(self):
        """Restart the server with a tiny max_results and verify that
        'list --detail' output is unchanged."""
        # Test truncated responses by overriding the nilmdb max_results
        server_stop()
        server_start(max_results = 2)
        self.ok("list --detail")
        lines_(self.captured, 8)
        server_stop()
        server_start()
    def test_10_remove(self):
        """Remove ranges of data and verify the remaining intervals."""
        # Removing data
        # Try nonexistent stream
        self.fail("remove /no/such/foo --start 2000-01-01 --end 2020-01-01")
        self.contain("No stream at path")
        # empty or backward ranges return errors
        self.fail("remove /newton/prep --start 2020-01-01 --end 2000-01-01")
        self.contain("start must precede end")
        self.fail("remove /newton/prep " +
                  "--start '23 Mar 2012 10:00:30' " +
                  "--end '23 Mar 2012 10:00:30'")
        self.contain("start must precede end")
        self.fail("remove /newton/prep " +
                  "--start '23 Mar 2012 10:00:30.000001' " +
                  "--end '23 Mar 2012 10:00:30.000001'")
        self.contain("start must precede end")
        self.fail("remove /newton/prep " +
                  "--start '23 Mar 2022 10:00:30' " +
                  "--end '23 Mar 2022 10:00:30'")
        self.contain("start must precede end")
        # Verbose
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2022 20:00:30' " +
                "--end '23 Mar 2022 20:00:31'")
        self.match("0\n")
        self.ok("remove --count /newton/prep " +
                "--start '23 Mar 2022 20:00:30' " +
                "--end '23 Mar 2022 20:00:31'")
        self.match("0\n")
        # Make sure we have the data we expect
        self.ok("list --detail /newton/prep")
        self.match("/newton/prep PrepData\n" +
                   "  [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
                   "  [ Fri, 23 Mar 2012 10:02:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:03:59.991668 +0000 ]\n"
                   "  [ Fri, 23 Mar 2012 10:04:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:05:59.991668 +0000 ]\n")
        # Remove various chunks of prep data and make sure
        # they're gone.
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:30' " +
                "--end '23 Mar 2012 10:00:40'")
        self.match("1200\n")
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:10' " +
                "--end '23 Mar 2012 10:00:20'")
        self.match("1200\n")
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:00:05' " +
                "--end '23 Mar 2012 10:00:25'")
        self.match("1200\n")
        self.ok("remove -c /newton/prep " +
                "--start '23 Mar 2012 10:03:50' " +
                "--end '23 Mar 2012 10:06:50'")
        self.match("15600\n")
        self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("24000\n")
        # See the missing chunks in list output
        self.ok("list --detail /newton/prep")
        self.match("/newton/prep PrepData\n" +
                   "  [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:00:05.000000 +0000 ]\n"
                   "  [ Fri, 23 Mar 2012 10:00:25.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:00:30.000000 +0000 ]\n"
                   "  [ Fri, 23 Mar 2012 10:00:40.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
                   "  [ Fri, 23 Mar 2012 10:02:00.000000 +0000"
                   " -> Fri, 23 Mar 2012 10:03:50.000000 +0000 ]\n")
        # Remove all data, verify it's missing
        self.ok("remove /newton/prep --start 2000-01-01 --end 2020-01-01")
        self.match("") # no count requested this time
        self.ok("list --detail /newton/prep")
        self.match("/newton/prep PrepData\n" +
                   "  (no intervals)\n")
        # Reinsert some data, to verify that no overlaps with deleted
        # data are reported
        os.environ['TZ'] = "UTC"
        self.ok("insert --rate 120 /newton/prep "
                "tests/data/prep-20120323T1000 "
                "tests/data/prep-20120323T1002")
    def test_11_destroy(self):
        """Destroy streams and re-create previously destroyed paths."""
        # Delete records
        self.ok("destroy --help")
        self.fail("destroy")
        self.contain("too few arguments")
        self.fail("destroy /no/such/stream")
        self.contain("No stream at path")
        self.fail("destroy asdfasdf")
        self.contain("No stream at path")
        # From previous tests, we have:
        self.ok("list")
        self.match("/newton/prep PrepData\n"
                   "/newton/raw RawData\n"
                   "/newton/zzz/rawnotch RawNotchedData\n")
        # Notice how they're not empty
        self.ok("list --detail")
        lines_(self.captured, 7)
        # Delete some
        self.ok("destroy /newton/prep")
        self.ok("list")
        self.match("/newton/raw RawData\n"
                   "/newton/zzz/rawnotch RawNotchedData\n")
        self.ok("destroy /newton/zzz/rawnotch")
        self.ok("list")
        self.match("/newton/raw RawData\n")
        self.ok("destroy /newton/raw")
        self.ok("create /newton/raw RawData")
        self.ok("destroy /newton/raw")
        self.ok("list")
        self.match("")
        # Re-create a previously deleted location, and some new ones
        rebuild = [ "/newton/prep", "/newton/zzz",
                    "/newton/raw", "/newton/asdf/qwer" ]
        for path in rebuild:
            # Create the path
            self.ok("create " + path + " PrepData")
            self.ok("list")
            self.contain(path)
            # Make sure it was created empty
            self.ok("list --detail --path " + path)
            self.contain("(no intervals)")
    def test_12_unicode(self):
        """Create and manipulate streams with Unicode paths and metadata."""
        # Unicode paths.
        self.ok("destroy /newton/asdf/qwer")
        self.ok("destroy /newton/prep")
        self.ok("destroy /newton/raw")
        self.ok("destroy /newton/zzz")
        self.ok(u"create /düsseldorf/raw uint16_6")
        self.ok("list --detail")
        self.contain(u"/düsseldorf/raw uint16_6")
        self.contain("(no intervals)")
        # Unicode metadata
        self.ok(u"metadata /düsseldorf/raw --set α=beta 'γ=δ'")
        self.ok(u"metadata /düsseldorf/raw --update 'α=β ε τ α'")
        self.ok(u"metadata /düsseldorf/raw")
        self.match(u"α=β ε τ α\nγ=δ\n")
        self.ok(u"destroy /düsseldorf/raw")
    def test_13_files(self):
        """Force tiny bulkdata files and verify splitting across many
        files, and that data survives a file_size change."""
        # Test BulkData's ability to split into multiple files,
        # by forcing the file size to be really small.
        server_stop()
        server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
                                       "files_per_dir" : 3 })
        # Fill data
        self.ok("create /newton/prep float32_8")
        os.environ['TZ'] = "UTC"
        with open("tests/data/prep-20120323T1004-timestamped") as input:
            self.ok("insert --none /newton/prep", input)
        # Extract it
        self.ok("extract /newton/prep --start '2000-01-01' " +
                "--end '2012-03-23 10:04:01'")
        lines_(self.captured, 120)
        self.ok("extract /newton/prep --start '2000-01-01' " +
                "--end '2022-03-23 10:04:01'")
        lines_(self.captured, 14400)
        # Make sure there were lots of files generated in the database
        # dir
        nfiles = 0
        for (dirpath, dirnames, filenames) in os.walk(testdb):
            nfiles += len(filenames)
        assert(nfiles > 500)
        # Make sure we can restart the server with a different file
        # size and have it still work
        server_stop()
        server_start()
        self.ok("extract /newton/prep --start '2000-01-01' " +
                "--end '2022-03-23 10:04:01'")
        lines_(self.captured, 14400)
        # Now recreate the data one more time and make sure there are
        # fewer files.
        self.ok("destroy /newton/prep")
        self.fail("destroy /newton/prep") # already destroyed
        self.ok("create /newton/prep float32_8")
        os.environ['TZ'] = "UTC"
        with open("tests/data/prep-20120323T1004-timestamped") as input:
            self.ok("insert --none /newton/prep", input)
        nfiles = 0
        for (dirpath, dirnames, filenames) in os.walk(testdb):
            nfiles += len(filenames)
        lt_(nfiles, 50)
        self.ok("destroy /newton/prep") # destroy again
  620. def test_14_remove_files(self):
  621. # Test BulkData's ability to remove when data is split into
  622. # multiple files. Should be a fairly comprehensive test of
  623. # remove functionality.
  624. server_stop()
  625. server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
  626. "files_per_dir" : 3 })
  627. # Insert data. Just for fun, insert out of order
  628. self.ok("create /newton/prep PrepData")
  629. os.environ['TZ'] = "UTC"
  630. self.ok("insert --rate 120 /newton/prep "
  631. "tests/data/prep-20120323T1002 "
  632. "tests/data/prep-20120323T1000")
  633. # Should take up about 2.8 MB here (including directory entries)
  634. du_before = nilmdb.utils.diskusage.du(testdb)
  635. # Make sure we have the data we expect
  636. self.ok("list --detail")
  637. self.match("/newton/prep PrepData\n" +
  638. " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
  639. " -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
  640. " [ Fri, 23 Mar 2012 10:02:00.000000 +0000"
  641. " -> Fri, 23 Mar 2012 10:03:59.991668 +0000 ]\n")
  642. # Remove various chunks of prep data and make sure
  643. # they're gone.
  644. self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
  645. self.match("28800\n")
  646. self.ok("remove -c /newton/prep " +
  647. "--start '23 Mar 2012 10:00:30' " +
  648. "--end '23 Mar 2012 10:03:30'")
  649. self.match("21600\n")
  650. self.ok("remove -c /newton/prep " +
  651. "--start '23 Mar 2012 10:00:10' " +
  652. "--end '23 Mar 2012 10:00:20'")
  653. self.match("1200\n")
  654. self.ok("remove -c /newton/prep " +
  655. "--start '23 Mar 2012 10:00:05' " +
  656. "--end '23 Mar 2012 10:00:25'")
  657. self.match("1200\n")
  658. self.ok("remove -c /newton/prep " +
  659. "--start '23 Mar 2012 10:03:50' " +
  660. "--end '23 Mar 2012 10:06:50'")
  661. self.match("1200\n")
  662. self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
  663. self.match("3600\n")
  664. # See the missing chunks in list output
  665. self.ok("list --detail")
  666. self.match("/newton/prep PrepData\n" +
  667. " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
  668. " -> Fri, 23 Mar 2012 10:00:05.000000 +0000 ]\n"
  669. " [ Fri, 23 Mar 2012 10:00:25.000000 +0000"
  670. " -> Fri, 23 Mar 2012 10:00:30.000000 +0000 ]\n"
  671. " [ Fri, 23 Mar 2012 10:03:30.000000 +0000"
  672. " -> Fri, 23 Mar 2012 10:03:50.000000 +0000 ]\n")
  673. # We have 1/8 of the data that we had before, so the file size
  674. # should have dropped below 1/4 of what it used to be
  675. du_after = nilmdb.utils.diskusage.du(testdb)
  676. lt_(du_after, (du_before / 4))
  677. # Remove anything that came from the 10:02 data file
  678. self.ok("remove /newton/prep " +
  679. "--start '23 Mar 2012 10:02:00' --end '2020-01-01'")
  680. # Re-insert 19 lines from that file, then remove them again.
  681. # With the specific file_size above, this will cause the last
  682. # file in the bulk data storage to be exactly file_size large,
  683. # so removing the data should also remove that last file.
  684. self.ok("insert --rate 120 /newton/prep " +
  685. "tests/data/prep-20120323T1002-first19lines")
  686. self.ok("remove /newton/prep " +
  687. "--start '23 Mar 2012 10:02:00' --end '2020-01-01'")
  688. # Shut down and restart server, to force nrows to get refreshed.
  689. server_stop()
  690. server_start()
  691. # Re-add the full 10:02 data file. This tests adding new data once
  692. # we removed data near the end.
  693. self.ok("insert --rate 120 /newton/prep tests/data/prep-20120323T1002")
  694. # See if we can extract it all
  695. self.ok("extract /newton/prep --start 2000-01-01 --end 2020-01-01")
  696. lines_(self.captured, 15600)