You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 
 

835 lines
32 KiB

  1. # -*- coding: utf-8 -*-
  2. import nilmtools.copy_one
  3. import nilmtools.cleanup
  4. import nilmtools.copy_one
  5. import nilmtools.copy_wildcard
  6. import nilmtools.decimate_auto
  7. import nilmtools.decimate
  8. import nilmtools.insert
  9. import nilmtools.median
  10. import nilmtools.pipewatch
  11. import nilmtools.prep
  12. import nilmtools.sinefit
  13. import nilmtools.trainola
  14. from nilmdb.utils.interval import Interval
  15. from nose.tools import assert_raises
  16. import unittest
  17. import numpy
  18. import math
  19. import json
  20. import random
  21. from testutil.helpers import *
  22. import subprocess
  23. import traceback
  24. import os
  25. import atexit
  26. import signal
  27. from urllib.request import urlopen
  28. from nilmtools.filter import ArgumentError
  29. def run_cherrypy_server(path, port, event):
  30. db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(path)
  31. server = nilmdb.server.Server(db, host="127.0.0.1",
  32. port=port, stoppable=True)
  33. server.start(blocking = True, event = event)
  34. db.close()
  35. class CommandTester():
  36. url = "http://localhost:32182/"
  37. url2 = "http://localhost:32183/"
  38. @classmethod
  39. def setup_class(cls):
  40. # We need two servers running for "copy_multiple", but
  41. # cherrypy uses globals and can only run once per process.
  42. # Using multiprocessing with "spawn" method should work in
  43. # theory, but is hard to get working when the test suite is
  44. # spawned directly by nosetests (rather than ./run-tests.py).
  45. # Instead, just run the real nilmdb-server that got installed
  46. # along with our nilmdb dependency.
  47. def terminate_servers():
  48. for p in cls.servers:
  49. p.terminate()
  50. atexit.register(terminate_servers)
  51. cls.servers = []
  52. for (path, port) in (("tests/testdb1", 32182),
  53. ("tests/testdb2", 32183)):
  54. def listening():
  55. try:
  56. urlopen(f"http://127.0.0.1:{port}/", timeout=0.1)
  57. return True
  58. except Exception as e:
  59. return False
  60. if listening():
  61. raise Exception(f"another server already running on {port}")
  62. recursive_unlink(path)
  63. p = subprocess.Popen(["nilmdb-server",
  64. "--address", "127.0.0.1",
  65. "--database", path,
  66. "--port", str(port),
  67. "--quiet",
  68. "--traceback"],
  69. stdin=subprocess.DEVNULL,
  70. stdout=subprocess.DEVNULL)
  71. for i in range(50):
  72. if listening():
  73. break
  74. time.sleep(0.1)
  75. else:
  76. raise Exception(f"server didn't start on port {port}")
  77. @classmethod
  78. def teardown_class(cls):
  79. for p in cls.servers:
  80. p.terminate()
  81. def run(self, arg_string, infile=None, outfile=None):
  82. """Run a cmdline client with the specified argument string,
  83. passing the given input. Save the output and exit code."""
  84. os.environ['NILMDB_URL'] = self.url
  85. self.last_args = arg_string
  86. class stdio_wrapper:
  87. def __init__(self, stdin, stdout, stderr):
  88. self.io = (stdin, stdout, stderr)
  89. def __enter__(self):
  90. self.saved = ( sys.stdin, sys.stdout, sys.stderr )
  91. ( sys.stdin, sys.stdout, sys.stderr ) = self.io
  92. def __exit__(self, type, value, traceback):
  93. ( sys.stdin, sys.stdout, sys.stderr ) = self.saved
  94. # Empty input if none provided
  95. if infile is None:
  96. infile = io.TextIOWrapper(io.BytesIO(b""))
  97. # Capture stderr
  98. errfile = io.TextIOWrapper(io.BytesIO())
  99. if outfile is None:
  100. # If no output file, capture stdout with stderr
  101. outfile = errfile
  102. with stdio_wrapper(infile, outfile, errfile) as s:
  103. try:
  104. args = shlex.split(arg_string)
  105. sys.argv[0] = "test_runner"
  106. self.main(args)
  107. sys.exit(0)
  108. except SystemExit as e:
  109. exitcode = e.code
  110. except Exception as e:
  111. traceback.print_exc()
  112. exitcode = 1
  113. # Capture raw binary output, and also try to decode a Unicode
  114. # string copy.
  115. self.captured_binary = outfile.buffer.getvalue()
  116. try:
  117. outfile.seek(0)
  118. self.captured = outfile.read()
  119. except UnicodeDecodeError:
  120. self.captured = None
  121. self.exitcode = exitcode
  122. def ok(self, arg_string, infile = None):
  123. self.run(arg_string, infile)
  124. if self.exitcode != 0:
  125. self.dump()
  126. eq_(self.exitcode, 0)
  127. def fail(self, arg_string, infile=None, exitcode=None):
  128. self.run(arg_string, infile)
  129. if exitcode is not None and self.exitcode != exitcode:
  130. # Wrong exit code
  131. self.dump()
  132. eq_(self.exitcode, exitcode)
  133. if self.exitcode == 0:
  134. # Success, when we wanted failure
  135. self.dump()
  136. ne_(self.exitcode, 0)
  137. def contain(self, checkstring, contain=True):
  138. if contain:
  139. in_(checkstring, self.captured)
  140. else:
  141. nin_(checkstring, self.captured)
  142. def match(self, checkstring):
  143. eq_(checkstring, self.captured)
  144. def matchfile(self, file):
  145. # Captured data should match file contents exactly
  146. with open(file) as f:
  147. contents = f.read()
  148. if contents != self.captured:
  149. print("--- reference file (first 1000 bytes):\n")
  150. print(contents[0:1000] + "\n")
  151. print("--- captured data (first 1000 bytes):\n")
  152. print(self.captured[0:1000] + "\n")
  153. zipped = itertools.zip_longest(contents, self.captured)
  154. for (n, (a, b)) in enumerate(zipped):
  155. if a != b:
  156. print("--- first difference is at offset", n)
  157. print("--- reference:", repr(a))
  158. print("--- captured:", repr(b))
  159. break
  160. raise AssertionError("captured data doesn't match " + file)
  161. def matchfilecount(self, file):
  162. # Last line of captured data should match the number of
  163. # non-commented lines in file
  164. count = 0
  165. with open(file) as f:
  166. for line in f:
  167. if line[0] != '#':
  168. count += 1
  169. eq_(self.captured.splitlines()[-1], sprintf("%d", count))
  170. def dump(self):
  171. printf("\n===args start===\n%s\n===args end===\n", self.last_args)
  172. printf("===dump start===\n%s===dump end===\n", self.captured)
  173. class TestAllCommands(CommandTester):
  174. def test_00_load_data(self):
  175. client = nilmdb.client.Client(url=self.url)
  176. client.stream_create("/newton/prep", "float32_8")
  177. client.stream_set_metadata("/newton/prep",
  178. { "description": "newton" })
  179. for ts in ("20120323T1000", "20120323T1002", "20120323T1004"):
  180. start = nilmdb.utils.time.parse_time(ts)
  181. fn = f"tests/data/prep-{ts}"
  182. data = nilmdb.utils.timestamper.TimestamperRate(fn, start, 120)
  183. client.stream_insert("/newton/prep", data);
  184. def test_01_copy(self):
  185. self.main = nilmtools.copy_one.main
  186. client = nilmdb.client.Client(url=self.url)
  187. # basic arguments
  188. self.fail(f"")
  189. self.fail(f"no-such-src no-such-dest")
  190. self.contain("source path no-such-src not found")
  191. self.fail(f"-u {self.url} no-such-src no-such-dest")
  192. # nonexistent dest
  193. self.fail(f"/newton/prep /newton/prep-copy")
  194. self.contain("Destination /newton/prep-copy doesn't exist")
  195. # wrong type
  196. client.stream_create("/newton/prep-copy-wrongtype", "uint16_6")
  197. self.fail(f"/newton/prep /newton/prep-copy-wrongtype")
  198. self.contain("wrong number of fields")
  199. # copy with metadata, and compare
  200. client.stream_create("/newton/prep-copy", "float32_8")
  201. self.ok(f"/newton/prep /newton/prep-copy")
  202. a = list(client.stream_extract("/newton/prep"))
  203. b = list(client.stream_extract("/newton/prep-copy"))
  204. eq_(a, b)
  205. a = client.stream_get_metadata("/newton/prep")
  206. b = client.stream_get_metadata("/newton/prep-copy")
  207. eq_(a, b)
  208. # copy with no metadata
  209. client.stream_create("/newton/prep-copy-nometa", "float32_8")
  210. self.ok(f"--nometa /newton/prep /newton/prep-copy-nometa")
  211. a = list(client.stream_extract("/newton/prep"))
  212. b = list(client.stream_extract("/newton/prep-copy-nometa"))
  213. eq_(a, b)
  214. a = client.stream_get_metadata("/newton/prep")
  215. b = client.stream_get_metadata("/newton/prep-copy-nometa")
  216. ne_(a, b)
  217. def test_02_copy_wildcard(self):
  218. self.main = nilmtools.copy_wildcard.main
  219. client1 = nilmdb.client.Client(url=self.url)
  220. client2 = nilmdb.client.Client(url=self.url2)
  221. # basic arguments
  222. self.fail(f"")
  223. self.fail(f"/newton")
  224. self.fail(f"-u {self.url} -U {self.url} /newton")
  225. self.contain("URL must be different")
  226. # no matches; silent
  227. self.ok(f"-u {self.url} -U {self.url2} /newton")
  228. self.ok(f"-u {self.url} -U {self.url2} /asdf*")
  229. self.ok(f"-u {self.url2} -U {self.url} /newton*")
  230. eq_(client2.stream_list(), [])
  231. # this won't actually copy, but will still create streams
  232. self.ok(f"-u {self.url} -U {self.url2} --dry-run /newton*")
  233. self.contain("Creating destination stream /newton/prep-copy")
  234. eq_(len(list(client2.stream_extract("/newton/prep"))), 0)
  235. # this should copy a bunch
  236. self.ok(f"-u {self.url} -U {self.url2} /*")
  237. self.contain("Creating destination stream /newton/prep-copy", False)
  238. eq_(client1.stream_list(), client2.stream_list())
  239. eq_(list(client1.stream_extract("/newton/prep")),
  240. list(client2.stream_extract("/newton/prep")))
  241. eq_(client1.stream_get_metadata("/newton/prep"),
  242. client2.stream_get_metadata("/newton/prep"))
  243. # repeating it is OK; it just won't recreate streams.
  244. # Let's try with --nometa too
  245. client2.stream_remove("/newton/prep")
  246. client2.stream_destroy("/newton/prep")
  247. self.ok(f"-u {self.url} -U {self.url2} --nometa /newton*")
  248. self.contain("Creating destination stream /newton/prep-copy", False)
  249. self.contain("Creating destination stream /newton/prep", True)
  250. eq_(client1.stream_list(), client2.stream_list())
  251. eq_(list(client1.stream_extract("/newton/prep")),
  252. list(client2.stream_extract("/newton/prep")))
  253. eq_(client2.stream_get_metadata("/newton/prep"), {})
  254. # fill in test cases
  255. self.ok(f"-u {self.url} -U {self.url2} -s 2010 -e 2020 -F /newton*")
  256. def test_03_decimate(self):
  257. self.main = nilmtools.decimate.main
  258. client = nilmdb.client.Client(url=self.url)
  259. # basic arguments
  260. self.fail(f"")
  261. # no dest
  262. self.fail(f"/newton/prep /newton/prep-decimated-1")
  263. self.contain("doesn't exist")
  264. # wrong dest shape
  265. client.stream_create("/newton/prep-decimated-bad", "float32_8")
  266. self.fail(f"/newton/prep /newton/prep-decimated-bad")
  267. self.contain("wrong number of fields")
  268. # bad factor
  269. self.fail(f"/newton/prep -f 1 /newton/prep-decimated-bad")
  270. self.contain("needs to be 2 or more")
  271. # ok, default factor 4
  272. client.stream_create("/newton/prep-decimated-4", "float32_24")
  273. self.ok(f"/newton/prep /newton/prep-decimated-4")
  274. a = client.stream_count("/newton/prep")
  275. b = client.stream_count("/newton/prep-decimated-4")
  276. eq_(a // 4, b)
  277. # factor 10
  278. client.stream_create("/newton/prep-decimated-10", "float32_24")
  279. self.ok(f"/newton/prep -f 10 /newton/prep-decimated-10")
  280. self.contain("Processing")
  281. a = client.stream_count("/newton/prep")
  282. b = client.stream_count("/newton/prep-decimated-10")
  283. eq_(a // 10, b)
  284. # different factor, same target
  285. self.fail(f"/newton/prep -f 16 /newton/prep-decimated-10")
  286. self.contain("Metadata in destination stream")
  287. self.contain("decimate_factor = 10")
  288. self.contain("doesn't match desired data")
  289. self.contain("decimate_factor = 16")
  290. # unless we force it
  291. self.ok(f"/newton/prep -f 16 -F /newton/prep-decimated-10")
  292. a = client.stream_count("/newton/prep")
  293. b = client.stream_count("/newton/prep-decimated-10")
  294. # but all data was already converted, so no more
  295. eq_(a // 10, b)
  296. # if we try to decimate an already-decimated stream, the suggested
  297. # shape is different
  298. self.fail(f"/newton/prep-decimated-4 -f 4 /newton/prep-decimated-16")
  299. self.contain("create /newton/prep-decimated-16 float32_24")
  300. # decimate again
  301. client.stream_create("/newton/prep-decimated-16", "float32_24")
  302. self.ok(f"/newton/prep-decimated-4 -f 4 /newton/prep-decimated-16")
  303. self.contain("Processing")
  304. # check shape suggestion for different input types
  305. for (shape, expected) in (("int32_1", "float64_3"),
  306. ("uint32_1", "float64_3"),
  307. ("int64_1", "float64_3"),
  308. ("uint64_1", "float64_3"),
  309. ("float32_1", "float32_3"),
  310. ("float64_1", "float64_3")):
  311. client.stream_create(f"/test/{shape}", shape)
  312. self.fail(f"/test/{shape} /test/{shape}-decim")
  313. self.contain(f"create /test/{shape}-decim {expected}")
  314. def test_04_decimate_auto(self):
  315. self.main = nilmtools.decimate_auto.main
  316. client = nilmdb.client.Client(url=self.url)
  317. self.fail(f"")
  318. self.fail(f"--max -1 asdf")
  319. self.contain("bad max")
  320. self.fail(f"/no/such/stream")
  321. self.contain("no stream matched path")
  322. # normal run
  323. self.ok(f"/newton/prep")
  324. # can't auto decimate a decimated stream
  325. self.fail(f"/newton/prep-decimated-16")
  326. self.contain("need to pass the base stream instead")
  327. # decimate prep again, this time much more; also use -F
  328. self.ok(f"-m 10 --force-metadata /newton/pr??")
  329. self.contain("Level 4096 decimation has 9 rows")
  330. # decimate the different shapes
  331. self.ok(f"/test/*")
  332. self.contain("Level 1 decimation has 0 rows")
  333. def test_05_insert(self):
  334. self.main = nilmtools.insert.main
  335. client = nilmdb.client.Client(url=self.url)
  336. self.fail(f"")
  337. self.ok(f"--help")
  338. # mutually exclusive arguments
  339. self.fail(f"--delta --rate 123 /foo bar")
  340. self.fail(f"--live --filename /foo bar")
  341. # Insert from file
  342. client.stream_create("/insert/prep", "float32_8")
  343. t0 = "tests/data/prep-20120323T1000"
  344. t2 = "tests/data/prep-20120323T1002"
  345. t4 = "tests/data/prep-20120323T1004"
  346. self.ok(f"--file --dry-run --rate 120 /insert/prep {t0} {t2} {t4}")
  347. self.contain("Dry run")
  348. # wrong rate
  349. self.fail(f"--file --dry-run --rate 10 /insert/prep {t0} {t2} {t4}")
  350. self.contain("Data is coming in too fast")
  351. # skip forward in time
  352. self.ok(f"--file --dry-run --rate 120 /insert/prep {t0} {t4}")
  353. self.contain("data timestamp behind by 120")
  354. self.contain("Skipping data timestamp forward")
  355. # skip backwards in time
  356. self.fail(f"--file --dry-run --rate 120 /insert/prep {t0} {t2} {t0}")
  357. self.contain("data timestamp ahead by 240")
  358. # skip backwards in time is OK if --skip provided
  359. self.ok(f"--skip -f -D -r 120 insert/prep {t0} {t2} {t0} {t4}")
  360. self.contain("Skipping the remainder of this file")
  361. # Now insert for real
  362. self.ok(f"--skip --file --rate 120 /insert/prep {t0} {t2} {t4}")
  363. self.contain("Done")
  364. # Overlap
  365. self.fail(f"--skip --file --rate 120 /insert/prep {t0}")
  366. self.contain("new data overlaps existing data")
  367. # Not overlap if we change file offset
  368. self.ok(f"--skip --file --rate 120 -o 0 /insert/prep {t0}")
  369. # Data with no timestamp
  370. self.fail(f"-f -r 120 /insert/prep tests/data/prep-notime")
  371. self.contain("No idea what timestamp to use")
  372. # Check intervals so far
  373. eq_(list(client.stream_intervals("/insert/prep")),
  374. [[1332507600000000, 1332507959991668],
  375. [1332511200000000, 1332511319991668]])
  376. # Delta supplied by file
  377. self.ok(f"--file --delta -o 0 /insert/prep {t4}-delta")
  378. eq_(list(client.stream_intervals("/insert/prep")),
  379. [[1332507600000000, 1332507959991668],
  380. [1332511200000000, 1332511319991668],
  381. [1332511440000000, 1332511499000001]])
  382. # Now fake live timestamps by using the delta file, and a
  383. # fake clock that increments one second per call.
  384. def fake_time_now():
  385. nonlocal fake_time_base
  386. ret = fake_time_base
  387. fake_time_base += 1000000
  388. return ret
  389. real_time_now = nilmtools.insert.time_now
  390. nilmtools.insert.time_now = fake_time_now
  391. # Delta supplied by file. This data is too fast because delta
  392. # contains a 50 sec jump
  393. fake_time_base = 1332511560000000
  394. self.fail(f"--live --delta -o 0 /insert/prep {t4}-delta")
  395. self.contain("Data is coming in too fast")
  396. self.contain("data time is Fri, 23 Mar 2012 10:06:55")
  397. self.contain("clock time is only Fri, 23 Mar 2012 10:06:06")
  398. # This data is OK, no jump
  399. fake_time_base = 1332511560000000
  400. self.ok(f"--live --delta -o 0 /insert/prep {t4}-delta2")
  401. # This has unparseable delta
  402. fake_time_base = 1332511560000000
  403. self.fail(f"--live --delta -o 0 /insert/prep {t4}-delta3")
  404. self.contain("can't parse delta")
  405. # Insert some gzipped data, with no timestamp in name
  406. bp1 = "tests/data/bpnilm-raw-1.gz"
  407. bp2 = "tests/data/bpnilm-raw-2.gz"
  408. client.stream_create("/insert/raw", "uint16_6")
  409. self.ok(f"--file /insert/raw {bp1} {bp2}")
  410. # Try truncated data
  411. tr = "tests/data/trunc"
  412. self.ok(f"--file /insert/raw {tr}1 {tr}2 {tr}3 {tr}4")
  413. nilmtools.insert.time_now = real_time_now
  414. def generate_sine_data(self, client, path, data_sec, fs, freq):
  415. # generate raw data
  416. client.stream_create(path, "uint16_2")
  417. with client.stream_insert_context(path) as ctx:
  418. for n in range(fs * data_sec):
  419. t = n / fs
  420. v = math.sin(t * 2 * math.pi * freq)
  421. i = 0.3 * math.sin(3*t) + math.sin(t)
  422. line = b"%d %d %d\n" % (
  423. (t + 1234567890) * 1e6,
  424. v * 32767 + 32768,
  425. i * 32768 + 32768)
  426. ctx.insert(line)
  427. if 0:
  428. for (s, e) in client.stream_intervals(path):
  429. print(Interval(s,e).human_string())
  430. def test_06_sinefit(self):
  431. self.main = nilmtools.sinefit.main
  432. client = nilmdb.client.Client(url=self.url)
  433. self.fail(f"")
  434. self.ok(f"--help")
  435. self.generate_sine_data(client, "/sf/raw", 50, 8000, 60)
  436. client.stream_create("/sf/out-bad", "float32_4")
  437. self.fail(f"--column 1 /sf/raw /sf/out-bad")
  438. self.contain("wrong number of fields")
  439. self.fail(f"--column 1 /sf/raw /sf/out")
  440. self.contain("/sf/out doesn't exist")
  441. # basic run
  442. client.stream_create("/sf/out", "float32_3")
  443. self.ok(f"--column 1 /sf/raw /sf/out")
  444. eq_(client.stream_count("/sf/out"), 3000)
  445. # parameter errors
  446. self.fail(f"--column 0 /sf/raw /sf/out")
  447. self.contain("need a column number")
  448. self.fail(f"/sf/raw /sf/out")
  449. self.contain("need a column number")
  450. self.fail(f"-c 1 --frequency 0 /sf/raw /sf/out")
  451. self.contain("frequency must be")
  452. self.fail(f"-c 1 --min-freq 100 /sf/raw /sf/out")
  453. self.contain("invalid min or max frequency")
  454. self.fail(f"-c 1 --max-freq 5 /sf/raw /sf/out")
  455. self.contain("invalid min or max frequency")
  456. self.fail(f"-c 1 --min-amp -1 /sf/raw /sf/out")
  457. self.contain("min amplitude must be")
  458. # trigger some warnings
  459. client.stream_create("/sf/out2", "float32_3")
  460. self.ok(f"-c 1 -f 500 -e @1234567897000000 /sf/raw /sf/out2")
  461. self.contain("outside valid range")
  462. self.contain("1000 warnings suppressed")
  463. eq_(client.stream_count("/sf/out2"), 0)
  464. self.ok(f"-c 1 -a 40000 -e @1234567898000000 /sf/raw /sf/out2")
  465. self.contain("below minimum threshold")
  466. # get coverage for "advance = N/2" line near end of sinefit,
  467. # where we found a fit but it was after the end of the window,
  468. # so we didn't actually mark anything in this window.
  469. self.ok(f"-c 1 -f 240 -m 50 -e @1234567898010000 /sf/raw /sf/out2")
  470. def test_07_median(self):
  471. self.main = nilmtools.median.main
  472. client = nilmdb.client.Client(url=self.url)
  473. self.fail(f"")
  474. self.ok(f"--help")
  475. client.stream_create("/median/1", "float32_8")
  476. client.stream_create("/median/2", "float32_8")
  477. self.fail("/newton/prep /median/0")
  478. self.contain("doesn't exist")
  479. self.ok("/newton/prep /median/1")
  480. self.ok("--difference /newton/prep /median/2")
  481. def test_08_prep(self):
  482. self.main = nilmtools.prep.main
  483. client = nilmdb.client.Client(url=self.url)
  484. self.fail(f"")
  485. self.ok(f"--help")
  486. self.fail(f"-c 2 /sf/raw /sf/out /prep/out")
  487. self.contain("/prep/out doesn't exist")
  488. # basic usage
  489. client.stream_create("/prep/out", "float32_8")
  490. self.ok(f"-c 2 /sf/raw /sf/out /prep/out")
  491. self.contain("processed 100000")
  492. # test arguments
  493. self.fail(f"/sf/raw /sf/out /prep/out")
  494. self.contain("need a column number")
  495. self.fail(f"-c 0 /sf/raw /sf/out /prep/out")
  496. self.contain("need a column number")
  497. self.fail(f"-c 2 -n 3 /sf/raw /sf/out /prep/out")
  498. self.contain("need 6 columns")
  499. self.fail(f"-c 2 -n 0 /sf/raw /sf/out /prep/out")
  500. self.contain("number of odd harmonics must be")
  501. self.fail(f"-c 2 -N 0 /sf/raw /sf/out /prep/out")
  502. self.contain("number of shifted FFTs must be")
  503. self.ok(f"-c 2 -r 0 /sf/raw /sf/out /prep/out")
  504. self.ok(f"-c 2 -R 0 /sf/raw /sf/out /prep/out")
  505. self.fail(f"-c 2 -r 0 -R 0 /sf/raw /sf/out /prep/out")
  506. self.fail(f"-c 2 /sf/raw /sf/no-sinefit-data /prep/out")
  507. self.contain("sinefit data not found")
  508. self.fail(f"-c 2 /sf/raw /prep/out /prep/out")
  509. self.contain("sinefit data type is float32_8; expected float32_3")
  510. # Limit time so only one row gets passed in
  511. client.stream_create("/prep/tmp", "float32_8")
  512. s = 1234567890000000
  513. e = 1234567890000125
  514. self.ok(f"-c 2 -s {s} -e {e} /sf/raw /sf/out /prep/tmp")
  515. # Lower sampling rate on everything, so that the FFT doesn't
  516. # return all the harmonics, and prep has to fill with zeros.
  517. # Tests the "if N < (nharm * 2):" condition in prep
  518. self.generate_sine_data(client, "/sf/raw-low", 5, 100, 60)
  519. self.main = nilmtools.sinefit.main
  520. client.stream_create("/sf/out-low", "float32_3")
  521. self.ok(f"--column 1 /sf/raw-low /sf/out-low")
  522. self.main = nilmtools.prep.main
  523. client.stream_create("/prep/out-low", "float32_8")
  524. self.ok(f"-c 2 /sf/raw-low /sf/out-low /prep/out-low")
  525. # Test prep with empty sinefit data
  526. client.stream_create("/sf/out-empty", "float32_3")
  527. with client.stream_insert_context("/sf/out-empty",
  528. 1034567890123456,
  529. 2034567890123456):
  530. pass
  531. client.stream_create("/prep/out-empty", "float32_8")
  532. self.ok(f"-c 2 /sf/raw /sf/out-empty /prep/out-empty")
  533. self.contain("warning: no periods found; skipping")
  534. def generate_trainola_data(self):
  535. # Build some fake data for trainola, which is just pulses of varying
  536. # length.
  537. client = nilmdb.client.Client(url=self.url)
  538. total_sec = 100
  539. fs = 100
  540. rg = numpy.random.Generator(numpy.random.MT19937(1234567))
  541. path = "/train/data"
  542. # Just build up some random pulses. This uses seeded random numbers,
  543. # so any changes here will affect the success/failures of tests later.
  544. client.stream_create(path, "float32_1")
  545. with client.stream_insert_context(path) as ctx:
  546. remaining = 0
  547. for n in range(fs * total_sec):
  548. t = n / fs
  549. data = rg.normal(100) / 100 - 1
  550. if remaining > 0:
  551. remaining -= 1
  552. data += 1
  553. else:
  554. if rg.integers(fs * 10 * total_sec) < fs:
  555. if rg.integers(3) < 2:
  556. remaining = fs*2
  557. else:
  558. remaining = fs/2
  559. line = b"%d %f\n" % (t * 1e6, data)
  560. ctx.insert(line)
  561. # To view what was made, try:
  562. if 0:
  563. subprocess.call(f"nilmtool -u {self.url} extract -s min -e max " +
  564. f"{path} > /tmp/data", shell=True)
  565. # then in Octave: a=load("/tmp/data"); plot(a(:,2));
  566. if 0:
  567. for (s, e) in client.stream_intervals(path):
  568. print(Interval(s,e).human_string())
  569. # Also generate something with more than 100k data points
  570. client.stream_create("/train/big", "uint8_1")
  571. with client.stream_insert_context("/train/big") as ctx:
  572. for n in range(110000):
  573. ctx.insert(b"%d 0\n" % n)
  574. def test_09_trainola(self):
  575. self.main = nilmtools.trainola.main
  576. client = nilmdb.client.numpyclient.NumpyClient(url=self.url)
  577. self.fail(f"")
  578. self.ok(f"--help")
  579. self.ok(f"--version")
  580. self.generate_trainola_data()
  581. def get_json(path):
  582. with open(path) as f:
  583. js = f.read().replace('\n', ' ')
  584. return f"'{js}'"
  585. # pass a dict as argv[0]
  586. with assert_raises(KeyError):
  587. saved_stdout = sys.stdout
  588. try:
  589. with open(os.devnull, 'w') as sys.stdout:
  590. nilmtools.trainola.main([{ "url": self.url }])
  591. finally:
  592. sys.stdout = saved_stdout
  593. # pass no args and they come from sys.argv
  594. saved_argv = sys.argv
  595. try:
  596. sys.argv = [ "prog", "bad-json," ]
  597. with assert_raises(json.decoder.JSONDecodeError):
  598. nilmtools.trainola.main()
  599. finally:
  600. sys.argv = saved_argv
  601. # catch a bunch of errors based on different json input
  602. client.stream_create("/train/matches", "uint8_1")
  603. for (num, error) in [ (1, "no columns"),
  604. (2, "duplicated columns"),
  605. (3, "bad column number"),
  606. (4, "source path '/c/d' does not exist"),
  607. (5, "destination path '/a/b' does not exist"),
  608. (6, "missing exemplars"),
  609. (7, "missing exemplars"),
  610. (8, "exemplar stream '/e/f' does not exist"),
  611. (9, "No data in this exemplar"),
  612. (10, "Too few data points"),
  613. (11, "Too many data points"),
  614. (12, "column FOO is not available in source") ]:
  615. self.fail(get_json(f"tests/data/trainola-bad{num}.js"))
  616. self.contain(error)
  617. # not enough columns in dest
  618. self.fail(get_json("tests/data/trainola1.js"))
  619. self.contain("bad destination column number")
  620. # run normally
  621. client.stream_destroy("/train/matches")
  622. client.stream_create("/train/matches", "uint8_2")
  623. self.ok(get_json("tests/data/trainola1.js"))
  624. self.contain("matched 10 exemplars")
  625. # check actual matches, since we made up the data
  626. matches = list(client.stream_extract_numpy("/train/matches"))
  627. eq_(matches[0].tolist(), [[34000000, 1, 0],
  628. [36000000, 0, 1],
  629. [40800000, 1, 0],
  630. [42800000, 0, 1],
  631. [60310000, 1, 0],
  632. [62310000, 0, 1],
  633. [69290000, 1, 0],
  634. [71290000, 0, 1],
  635. [91210000, 1, 0],
  636. [93210000, 0, 1]])
  637. # another run using random noise as an exemplar, to get better coverage
  638. client.stream_create("/train/matches2", "uint8_1")
  639. self.ok(get_json("tests/data/trainola2.js"))
  640. def test010_pipewatch(self):
  641. self.main = nilmtools.pipewatch.main
  642. self.fail(f"")
  643. self.ok(f"--help")
  644. lock = "tests/pipewatch.lock"
  645. lk = f"--lock {lock}"
  646. try:
  647. os.unlink(lock)
  648. except OSError:
  649. pass
  650. # try locking so pipewatch will exit (with code 0)
  651. lockfile = open(lock, "w")
  652. nilmdb.utils.lock.exclusive_lock(lockfile)
  653. self.ok(f"{lk} true true")
  654. self.contain("pipewatch process already running")
  655. os.unlink(lock)
  656. # have pipewatch remove its own lock to trigger error later
  657. self.ok(f"{lk} 'rm {lock}' true")
  658. # various cases to get coverage
  659. self.ok(f"{lk} true 'cat >/dev/null'")
  660. self.contain("generator returned 0, consumer returned 0")
  661. self.fail(f"{lk} false true")
  662. self.contain("generator returned 1, consumer returned 0")
  663. self.fail(f"{lk} false false")
  664. self.contain("generator returned 1, consumer returned 1")
  665. self.fail(f"{lk} true false")
  666. self.contain("generator returned 0, consumer returned 1")
  667. self.fail(f"{lk} 'kill -15 $$' true")
  668. self.ok(f"{lk} 'sleep 1 ; echo hi' 'cat >/dev/null'")
  669. self.ok(f"{lk} 'echo hi' 'cat >/dev/null'")
  670. self.fail(f"{lk} --timeout 0.5 'sleep 10 ; echo hi' 'cat >/dev/null'")
  671. self.fail(f"{lk} 'yes' 'head -1 >/dev/null'")
  672. self.fail(f"{lk} false 'exec 2>&-; trap \"sleep 10\" 0 15 ; sleep 10'")
  673. def test_11_cleanup(self):
  674. self.main = nilmtools.cleanup.main
  675. client = nilmdb.client.Client(url=self.url)
  676. # This mostly just gets coverage, doesn't carefully verify behavior
  677. self.fail(f"")
  678. self.ok(f"--help")
  679. self.fail(f"tests/data/cleanup-bad.cfg")
  680. self.contain("unknown units")
  681. client.stream_create("/empty/foo", "uint16_1")
  682. self.ok(f"tests/data/cleanup.cfg")
  683. self.contain("'/nonexistent/bar' did not match any existing streams")
  684. self.contain("no config for existing stream '/empty/foo'")
  685. self.contain("nothing to do (only 0.00 weeks of data present)")
  686. self.contain("specify --yes to actually perform")
  687. self.ok(f"--yes tests/data/cleanup.cfg")
  688. self.contain("removing data before")
  689. self.contain("removing from /sf/raw")
  690. self.ok(f"--estimate tests/data/cleanup.cfg")
  691. self.contain("Total estimated disk usage")
  692. self.contain("MiB")
  693. self.contain("GiB")
  694. self.ok(f"--yes tests/data/cleanup-nodecim.cfg")
  695. self.ok(f"--estimate tests/data/cleanup-nodecim.cfg")