Compare commits

11 commits: nilmtools-... to nilmtools-...

SHA1:
a4d4bc22fc
6090dd6112
9c0d9ad324
8b9c5d4898
cf2c28b0fb
87a26c907b
def465b57c
0589b8d316
9c5f07106d
62e11a11c0
2bdcee2c36
Makefile (14 lines changed)

@@ -8,19 +8,26 @@ else
 	@echo "Try 'make install'"
 endif

-test: test_trainola
+test: test_insert
+
+test_pipewatch:
+	nilmtools/pipewatch.py -t 3 "seq 10 20" "seq 20 30"

 test_trainola:
+	-nilmtool -u http://bucket/nilmdb remove -s min -e max \
+		/sharon/prep-a-matches
+	nilmtools/trainola.py "$$(cat extras/trainola-test-param-2.js)"
 	-nilmtool -u http://bucket/nilmdb remove -s min -e max \
 		/sharon/prep-a-matches
 	nilmtools/trainola.py "$$(cat extras/trainola-test-param.js)"

 test_cleanup:
 	nilmtools/cleanup.py -e extras/cleanup.cfg
 	nilmtools/cleanup.py extras/cleanup.cfg

 test_insert:
-	nilmtools/insert.py --file --dry-run /test/foo </dev/null
+	nilmtools/insert.py --skip --file --dry-run /foo/bar ~/data/20130311T2100.prep1.gz ~/data/20130311T2100.prep1.gz ~/data/20130311T2200.prep1.gz

 test_copy:
 	nilmtools/copy_wildcard.py -U "http://nilmdb.com/bucket/" -D /lees*

@@ -39,7 +46,8 @@ test_prep: /tmp/raw.dat
 	nilmtool create /test/sinefit float32_3
 	nilmtool create /test/prep float32_8
 	nilmtool insert -s '@0' -t -r 8000 /test/raw /tmp/raw.dat
-	nilmtools/sinefit.py -a 0.5 -c 1 /test/raw /test/sinefit
+	nilmtools/sinefit.py -a 0.5 -c 1 -s '@0' -e '@5000000' /test/raw /test/sinefit
+	nilmtools/prep.py -c 2 /test/raw /test/sinefit /test/prep
 	nilmtools/prep.py -c 2 /test/raw /test/sinefit /test/prep
 	nilmtool extract -s min -e max /test/prep | head -20
README.txt

@@ -5,10 +5,10 @@ by Jim Paris <jim@jtan.com>

 Prerequisites:

 	# Runtime and build environments
-	sudo apt-get install python2.7 python2.7-dev python-setuptools python-pip
-	sudo apt-get install python-numpy python-scipy
+	sudo apt-get install python2.7 python2.7-dev python-setuptools
+	sudo apt-get install python-numpy python-scipy python-daemon

-	nilmdb (1.8.1+)
+	nilmdb (1.8.5+)

 Install:
extras/sample-cron-scripts/capture.sh (new executable file, 10 lines)

@@ -0,0 +1,10 @@
+#!/bin/bash
+
+# Start the ethstream capture using nilm-pipewatch
+
+# Bail out on errors
+set -e
+
+nilm-pipewatch --daemon --lock "/tmp/nilmdb-capture.lock" --timeout 30 \
+	"ethstream -a 192.168.1.209 -n 9 -r 8000 -N" \
+	"nilm-insert -m 10 -r 8000 --live /sharon/raw"
extras/sample-cron-scripts/cleanup.cfg (new file, 8 lines)

@@ -0,0 +1,8 @@
+[/sharon/prep-*]
+keep = 1y
+
+[/sharon/raw]
+keep = 2w
+
+[/sharon/sinefit]
+keep = 1y
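
Each section names a stream path (wildcards allowed) and 'keep' gives that stream's retention period. Purely as an illustration of the format (the real option handling lives in nilmtools/cleanup.py and may differ), a hypothetical reader:

    import ConfigParser   # Python 2, matching the rest of nilmtools

    def parse_keep(value):
        # Illustrative duration parsing: "2w" -> seconds, etc.
        units = { 'h': 3600, 'd': 86400, 'w': 7 * 86400,
                  'm': 30 * 86400, 'y': 365 * 86400 }
        return float(value[:-1]) * units[value[-1]]

    config = ConfigParser.RawConfigParser()
    config.read("extras/sample-cron-scripts/cleanup.cfg")
    for pattern in config.sections():        # e.g. "/sharon/raw"
        keep = parse_keep(config.get(pattern, "keep"))
        print "would keep %d seconds of %s" % (keep, pattern)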
extras/sample-cron-scripts/crontab (new file, 9 lines)

@@ -0,0 +1,9 @@
+# Install this by running "crontab crontab" (will replace existing crontab)
+
+# m h dom mon dow cmd
+
+# Run NilmDB processing every 5 minutes
+*/5 * * * * chronic /home/nilm/data/process.sh
+
+# Check the capture process every minute
+*/1 * * * * chronic /home/nilm/data/capture.sh
extras/sample-cron-scripts/process.sh (new executable file, 28 lines)

@@ -0,0 +1,28 @@
+#!/bin/bash
+# Run all necessary processing on NilmDB data.
+
+# Bail out on errors
+set -e
+
+# Ensure only one copy of this code runs at a time:
+LOCKFILE="/tmp/nilmdb-process.lock"
+exec 99>"$LOCKFILE"
+if ! flock -n -x 99 ; then
+	echo "NilmDB processing already running, giving up..."
+	exit 0
+fi
+trap 'rm -f "$LOCKFILE"' 0
+
+# sinefit on phase A voltage
+nilm-sinefit -c 5 /sharon/raw /sharon/sinefit
+
+# prep on A, B, C with appropriate rotations
+nilm-prep -c 1 -r 0 /sharon/raw /sharon/sinefit /sharon/prep-a
+nilm-prep -c 2 -r 120 /sharon/raw /sharon/sinefit /sharon/prep-b
+nilm-prep -c 3 -r 240 /sharon/raw /sharon/sinefit /sharon/prep-c
+
+# decimate raw and prep data
+nilm-decimate-auto /sharon/raw /sharon/prep*
+
+# run cleanup
+nilm-cleanup --yes /home/nilm/data/cleanup.cfg
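
The flock/trap idiom above guarantees a single running instance. The same idiom in Python, using only the standard library (pipewatch.py below does the equivalent via nilmdb.utils.lock):

    import fcntl
    import sys

    lockfile = open("/tmp/nilmdb-process.lock", "w")
    try:
        # Non-blocking exclusive lock, like "flock -n -x 99"
        fcntl.flock(lockfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        print "NilmDB processing already running, giving up..."
        sys.exit(0)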
extras/trainola-test-param-2.js (new file, 29 lines)

@@ -0,0 +1,29 @@
+{ "columns" : [ { "index" : 0, "name" : "P1" },
+                { "index" : 1, "name" : "Q1" },
+                { "index" : 2, "name" : "P3" } ],
+  "stream" : "/sharon/prep-a",
+  "url" : "http://bucket.mit.edu/nilmdb",
+  "dest_stream" : "/sharon/prep-a-matches",
+  "start" : 1365153062643133.5,
+  "end" : 1365168814443575.5,
+  "exemplars" : [ { "columns" : [ { "index" : 0,
+                                    "name" : "P1"
+                                  } ],
+                    "dest_column" : 0,
+                    "end" : 1365073657682000,
+                    "name" : "Turn ON",
+                    "start" : 1365073654321000,
+                    "stream" : "/sharon/prep-a",
+                    "url" : "http://bucket.mit.edu/nilmdb"
+                  },
+                  { "columns" : [ { "index" : 2, "name" : "P3" },
+                                  { "index" : 0, "name" : "P1" } ],
+                    "dest_column" : 1,
+                    "end" : 1365176528818000,
+                    "name" : "Type 2 turn ON",
+                    "start" : 1365176520030000,
+                    "stream" : "/sharon/prep-a",
+                    "url" : "http://bucket.mit.edu/nilmdb"
+                  }
+                ]
+}
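
This file is handed to nilm-trainola as one JSON command-line argument (see the test_trainola target above). A sketch of the same invocation from Python, assuming trainola's main() accepts an argv list like the other entry points in this changeset:

    import nilmtools.trainola

    # Mirrors the Makefile test:
    #   nilmtools/trainola.py "$$(cat extras/trainola-test-param-2.js)"
    with open("extras/trainola-test-param-2.js") as f:
        params = f.read()
    nilmtools.trainola.main([params])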
nilmtools/filter.py

@@ -316,7 +316,8 @@ class Filter(object):
         self._client_dest.stream_update_metadata(self.dest.path, data)

     # The main filter processing method.
-    def process_numpy(self, function, args = None, rows = 100000):
+    def process_numpy(self, function, args = None, rows = 100000,
+                      intervals = None):
         """Calls process_numpy_interval for each interval that currently
         exists in self.src, but doesn't exist in self.dest.  It will
         process the data in chunks as follows:

@@ -325,6 +326,9 @@ class Filter(object):
         corresponding to the data.  The data is converted to a Numpy
         array in chunks of 'rows' rows at a time.

+        If 'intervals' is not None, process those intervals instead of
+        the default list.
+
         'function' should be defined as:
         # def function(data, interval, args, insert_func, final)

@@ -358,7 +362,7 @@ class Filter(object):
                                                maxrows = rows)
         inserter_func = functools.partial(inserter, self.dest.path)

-        for interval in self.intervals():
+        for interval in (intervals or self.intervals()):
             print "Processing", interval.human_string()
             process_numpy_interval(interval, extractor_func, inserter_func,
                                    rows * 3, function, args)
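
For context, a minimal sketch of a callback with the documented signature, driven through the new keyword. Only process_numpy's signature comes from this diff; the Filter setup calls follow the pattern of the other nilmtools filters and are assumptions here, as is the rows-consumed return convention:

    import nilmtools.filter

    def passthrough(data, interval, args, insert_func, final):
        # 'data' rows include the timestamp in column 0; insert unchanged.
        insert_func(data)
        # Assumed convention: report how many rows were consumed.
        return data.shape[0]

    def main(argv = None):
        f = nilmtools.filter.Filter()
        f.setup_parser("Example pass-through filter")   # assumed setup
        args = f.parse_args(argv)
        # Explicit interval list via the new keyword; f.intervals() is
        # the same default the loop would otherwise use.
        f.process_numpy(passthrough, args = None, rows = 100000,
                        intervals = f.intervals())

    if __name__ == "__main__":
        main()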
nilmtools/insert.py

@@ -53,7 +53,8 @@ def parse_args(argv = None):
       is stepped forward to match 'clock'.

     - If 'data' is running ahead, there is overlap in the data, and an
-      error is raised.
+      error is raised.  If '--skip' is specified, the current file
+      is skipped instead of raising an error.
     """))
     parser.add_argument("-u", "--url", action="store",
                         default="http://localhost/nilmdb/",

@@ -61,6 +62,8 @@ def parse_args(argv = None):
     group = parser.add_argument_group("Misc options")
     group.add_argument("-D", "--dry-run", action="store_true",
                        help="Parse files, but don't insert any data")
+    group.add_argument("-s", "--skip", action="store_true",
+                       help="Skip files if the data would overlap")
     group.add_argument("-m", "--max-gap", action="store", default=10.0,
                        metavar="SEC", type=float,
                        help="Max discrepancy between clock and data "

@@ -235,6 +238,10 @@ def main(argv = None):
                        "is %s but clock time is only %s",
                        timestamp_to_human(data_ts),
                        timestamp_to_human(clock_ts))
+                if args.skip:
+                    printf("%s\n", err)
+                    printf("Skipping the remainder of this file\n")
+                    break
                 raise ParseError(filename, err)

             if (data_ts + max_gap) < clock_ts:
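
With '--skip', an overlapping file is reported and skipped instead of aborting the whole run. The updated test_insert target exercises this; it lists the same file twice, presumably so the second copy triggers the overlap path:

    nilmtools/insert.py --skip --file --dry-run /foo/bar \
        ~/data/20130311T2100.prep1.gz \
        ~/data/20130311T2100.prep1.gz \
        ~/data/20130311T2200.prep1.gz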
nilmtools/pipewatch.py (new executable file, 168 lines)

@@ -0,0 +1,168 @@
+#!/usr/bin/python
+
+import nilmdb.client
+from nilmdb.utils.printf import *
+import nilmdb.utils.lock
+import nilmtools
+
+import time
+import sys
+import os
+import argparse
+import subprocess
+import tempfile
+import threading
+import select
+import signal
+import Queue
+import daemon
+
+def parse_args(argv = None):
+    parser = argparse.ArgumentParser(
+        formatter_class = argparse.ArgumentDefaultsHelpFormatter,
+        version = nilmtools.__version__,
+        description = """\
+Pipe data from 'generator' to 'consumer'.  This is intended to be
+executed frequently from cron, and will exit if another copy is
+already running.  If 'generator' or 'consumer' returns an error,
+or if 'generator' stops sending data for a while, it will exit.
+
+Intended for use with ethstream (generator) and nilm-insert
+(consumer).  Commands are executed through the shell.
+""")
+    parser.add_argument("-d", "--daemon", action="store_true",
+                        help="Run in background")
+    parser.add_argument("-l", "--lock", metavar="FILENAME", action="store",
+                        default=tempfile.gettempdir() +
+                        "/nilm-pipewatch.lock",
+                        help="Lock file for detecting running instance")
+    parser.add_argument("-t", "--timeout", metavar="SECONDS", action="store",
+                        type=float, default=30,
+                        help="Restart if no output from " +
+                        "generator for this long")
+    group = parser.add_argument_group("commands to execute")
+    group.add_argument("generator", action="store",
+                       help="Data generator (e.g. \"ethstream -r 8000\")")
+    group.add_argument("consumer", action="store",
+                       help="Data consumer (e.g. \"nilm-insert /foo/bar\")")
+    args = parser.parse_args(argv)
+
+    return args
+
+def reader_thread(queue, fd):
+    # Read from a file descriptor, write to queue.
+    try:
+        while True:
+            (r, w, x) = select.select([fd], [], [fd], 0.25)
+            if x:
+                raise Exception # generator died?
+            if not r:
+                # short timeout -- just try again.  This is to catch the
+                # fd being closed elsewhere, which is only detected
+                # when select restarts.
+                continue
+            data = os.read(fd, 65536)
+            if data == "": # generator EOF
+                raise Exception
+            queue.put(data)
+    except Exception:
+        queue.put(None)
+
+def watcher_thread(queue, procs):
+    # Put None in the queue if either process dies
+    while True:
+        for p in procs:
+            if p.poll() is not None:
+                queue.put(None)
+                return
+        time.sleep(0.25)
+
+def pipewatch(args):
+    # Run the processes, etc
+    with open(os.devnull, "r") as devnull:
+        generator = subprocess.Popen(args.generator, shell = True,
+                                     bufsize = -1, close_fds = True,
+                                     stdin = devnull,
+                                     stdout = subprocess.PIPE,
+                                     stderr = None)
+        consumer = subprocess.Popen(args.consumer, shell = True,
+                                    bufsize = -1, close_fds = True,
+                                    stdin = subprocess.PIPE,
+                                    stdout = None, stderr = None)
+
+        queue = Queue.Queue(maxsize = 32)
+        reader = threading.Thread(target = reader_thread,
+                                  args = (queue, generator.stdout.fileno()))
+        reader.start()
+        watcher = threading.Thread(target = watcher_thread,
+                                   args = (queue, [generator, consumer]))
+        watcher.start()
+        try:
+            while True:
+                try:
+                    data = queue.get(True, args.timeout)
+                    if data is None:
+                        break
+                    consumer.stdin.write(data)
+                except Queue.Empty:
+                    # Timeout: kill the generator
+                    fprintf(sys.stderr, "pipewatch: timeout\n")
+                    generator.terminate()
+                    break
+
+            generator.stdout.close()
+            consumer.stdin.close()
+        except IOError:
+            fprintf(sys.stderr, "pipewatch: I/O error\n")
+
+        def kill(proc):
+            # Wait for a process to end, or kill it
+            def poll_timeout(proc, timeout):
+                for x in range(1+int(timeout / 0.1)):
+                    if proc.poll() is not None:
+                        break
+                    time.sleep(0.1)
+                return proc.poll()
+            try:
+                if poll_timeout(proc, 0.5) is None:
+                    proc.terminate()
+                    if poll_timeout(proc, 0.5) is None:
+                        proc.kill()
+            except OSError:
+                pass
+            return poll_timeout(proc, 0.5)
+
+        # Wait for them to die, or kill them
+        gret = kill(generator)
+        cret = kill(consumer)
+
+        fprintf(sys.stderr, "pipewatch: generator returned %d, " +
+                "consumer returned %d\n", gret, cret)
+        if gret == 0 and cret == 0:
+            sys.exit(0)
+        sys.exit(1)
+
+def main(argv = None):
+    args = parse_args(argv)
+
+    lockfile = open(args.lock, "w")
+    if not nilmdb.utils.lock.exclusive_lock(lockfile):
+        printf("pipewatch process already running (according to %s)\n",
+               args.lock)
+        sys.exit(0)
+    try:
+        # Run as a daemon if requested, otherwise run directly.
+        if args.daemon:
+            with daemon.DaemonContext(files_preserve = [ lockfile ]):
+                pipewatch(args)
+        else:
+            pipewatch(args)
+    finally:
+        # Clean up lockfile
+        try:
+            os.unlink(args.lock)
+        except OSError:
+            pass
+
+if __name__ == "__main__":
+    main()
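
For a quick smoke test without capture hardware, the new test_pipewatch Makefile target drives the tool with two 'seq' commands; the same call from Python relies on the parse_args(argv) plumbing shown above:

    import nilmtools.pipewatch

    # Equivalent to: nilmtools/pipewatch.py -t 3 "seq 10 20" "seq 20 30"
    # "seq 10 20" acts as the generator, "seq 20 30" as the consumer;
    # note that pipewatch() finishes with sys.exit().
    nilmtools.pipewatch.main(["-t", "3", "seq 10 20", "seq 20 30"])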
nilmtools/prep.py

@@ -12,6 +12,7 @@ import scipy.fftpack
 import scipy.signal
 #from matplotlib import pyplot as p
 import bisect
+from nilmdb.utils.interval import Interval

 def main(argv = None):
     # Set up argument parser

@@ -82,9 +83,20 @@ def main(argv = None):
                  "prep_column": args.column,
                  "prep_rotation": repr(rotation) })

-    # Run the processing function on all data
+    # Find the intersection of the usual set of intervals we'd filter,
+    # and the intervals actually present in sinefit data.  This is
+    # what we will process.
+    filter_int = f.intervals()
+    sinefit_int = ( Interval(start, end) for (start, end) in
+                    client_sinefit.stream_intervals(
+                        args.sinepath, start = f.start, end = f.end) )
+    intervals = nilmdb.utils.interval.intersection(filter_int, sinefit_int)
+
+    # Run the process (using the helper in the filter module)
     f.process_numpy(process, args = (client_sinefit, sinefit.path, args.column,
-                                     args.nharm, rotation, args.nshift))
+                                     args.nharm, rotation, args.nshift),
+                    intervals = intervals)

 def process(data, interval, args, insert_function, final):
     (client, sinefit_path, column, nharm, rotation, nshift) = args
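
The comment above describes intersecting two interval lists; a toy illustration of the semantics with made-up timestamps, using the same Interval class and intersection helper as the diff:

    from nilmdb.utils.interval import Interval
    import nilmdb.utils.interval

    # Unprocessed data covers [0, 100); sinefit data covers [50, 150);
    # only [50, 100) should be processed.
    filter_int = [ Interval(0, 100) ]
    sinefit_int = [ Interval(50, 150) ]
    for i in nilmdb.utils.interval.intersection(filter_int, sinefit_int):
        print i.human_string()   # expect the span from 50 to 100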
nilmtools/trainola.py

@@ -28,12 +28,12 @@ def build_column_mapping(colinfo, streaminfo):
     pull out a dictionary mapping for the column names/numbers."""
     columns = OrderedDict()
     for c in colinfo:
-        if (c['name'] in columns.keys() or
-            c['index'] in columns.values()):
+        col_num = c['index'] + 1 # skip timestamp
+        if (c['name'] in columns.keys() or col_num in columns.values()):
             raise DataError("duplicated columns")
         if (c['index'] < 0 or c['index'] >= streaminfo.layout_count):
             raise DataError("bad column number")
-        columns[c['name']] = c['index']
+        columns[c['name']] = col_num
     if not len(columns):
         raise DataError("no columns")
     return columns

@@ -54,6 +54,9 @@ class Exemplar(object):
         # Get stream info
         self.client = nilmdb.client.numpyclient.NumpyClient(self.url)
         self.info = nilmtools.filter.get_stream_info(self.client, self.stream)
+        if not self.info:
+            raise DataError(sprintf("exemplar stream '%s' does not exist " +
+                                    "on server '%s'", self.stream, self.url))

         # Build up name => index mapping for the columns
         self.columns = build_column_mapping(exinfo['columns'], self.info)

@@ -76,10 +79,17 @@ class Exemplar(object):
                                            maxrows = self.count)
         self.data = list(datagen)[0]

-        # Discard timestamp
-        self.data = self.data[:,1:]
+        # Extract just the columns that were specified in self.columns,
+        # skipping the timestamp.
+        extract_columns = [ value for (key, value) in self.columns.items() ]
+        self.data = self.data[:,extract_columns]
+
+        # Fix the column indices in e.columns, since we removed/reordered
+        # columns in self.data
+        for n, k in enumerate(self.columns):
+            self.columns[k] = n

-        # Subtract the mean from each column
+        # Subtract the means from each column
         self.data = self.data - self.data.mean(axis=0)

         # Get scale factors for each column by computing dot product

@@ -144,7 +154,7 @@ def trainola_matcher(data, interval, args, insert_func, final_chunk):

     # Compute cross-correlation for each column
     for col_name in e.columns:
-        a = data[:, src_columns[col_name] + 1]
+        a = data[:, src_columns[col_name]]
         b = e.data[:, e.columns[col_name]]
         corr = scipy.signal.fftconvolve(a, np.flipud(b), 'valid')[0:valid]
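
The column bookkeeping above is easy to misread: build_column_mapping now stores index+1 so names point past the timestamp, and after extraction the indices are renumbered to match the reduced array. A toy numpy illustration with made-up values:

    import numpy as np
    from collections import OrderedDict

    # Rows of [timestamp, P1, Q1, P3]; the mapping stores raw column
    # numbers offset by one to skip the timestamp in column 0.
    data = np.array([[ 100.0, 1.0, 2.0, 3.0 ],
                     [ 101.0, 4.0, 5.0, 6.0 ]])
    columns = OrderedDict([ ("P3", 3), ("P1", 1) ])   # name -> raw column

    data = data[:, list(columns.values())]   # drop timestamp, reorder
    for n, k in enumerate(columns):          # renumber to match new array
        columns[k] = n
    print columns   # OrderedDict([('P3', 0), ('P1', 1)])
    print data      # [[ 3.  1.]  [ 6.  4.]]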
setup.py (4 lines changed)

@@ -61,9 +61,10 @@ setup(name='nilmtools',
       long_description = "NILM Database Tools",
       license = "Proprietary",
       author_email = 'jim@jtan.com',
-      install_requires = [ 'nilmdb >= 1.8.1',
+      install_requires = [ 'nilmdb >= 1.8.5',
                            'numpy',
                            'scipy',
+                           'python-daemon >= 1.5',
                            #'matplotlib',
                            ],
       packages = [ 'nilmtools',

@@ -80,6 +81,7 @@ setup(name='nilmtools',
                    'nilm-cleanup = nilmtools.cleanup:main',
                    'nilm-median = nilmtools.median:main',
                    'nilm-trainola = nilmtools.trainola:main',
+                   'nilm-pipewatch = nilmtools.pipewatch:main',
                    ],
        },
       zip_safe = False,