Compare commits

..

45 Commits

Author SHA1 Message Date
eae6dd623f Freeze requirements 2020-08-06 18:13:01 -04:00
09a9ed9734 Fix #! at top of shell scripts for py3 and venvs 2020-08-05 17:01:47 -04:00
079a2b5192 Fix nilmrun scripts for python 3 2020-08-05 16:59:56 -04:00
e7f52a4013 Update versioneer 2020-08-05 16:42:46 -04:00
c36b9b97e0 Add missing MANIFEST.in, update .gitignore 2020-08-03 23:48:07 -04:00
549a27e66c Add flake8 tests and clean up warnings 2020-08-03 23:46:34 -04:00
cd68389e9a Update for Python 3 and psutil >= 2. 2020-08-03 23:08:52 -04:00
6faf563bda Remove nilmrun.testfilter
This module was only used to assist testing, but wasn't actually
available in a test environment where no copies of nilmrun have been
installed.  Just code the contents directly into the filters that the
tests are running.
2020-08-03 23:05:32 -04:00
58fd9d1559 Run cherrypy always in "embedded" mode
Non-embedded mode is not used in the test suite or wsgi server;
it was an option in the standalone nilmdb-server script, but it's
really not necessary, and removing it gets rid of some untested
code.
2020-08-03 16:57:17 -04:00
2bc939d42d CherryPy bug 1200 is no longer an issue 2020-08-03 16:55:23 -04:00
fe36722684 Use built-in json module rather than external simplejson 2020-08-03 16:54:47 -04:00
24740a838e Run 2to3 over all code 2020-08-03 16:54:33 -04:00
d332fa1e0f Fix inconsistent indentation 2020-08-03 16:52:02 -04:00
cbc7a7dd55 Start Python 3 conversion 2020-08-03 16:51:01 -04:00
38c3e67cf9 Fix long lines 2013-08-08 17:47:06 -04:00
7f05a0fb62 Switch to bash for Unicode tests 2013-08-07 12:42:05 -04:00
81c2ad07d4 More robust checking in 09_info test 2013-08-07 12:32:21 -04:00
3588e843ac Replace burnP5 with dd 2013-08-07 12:31:43 -04:00
9309fd9b57 Change -V option to -v everywhere 2013-08-06 21:33:59 -04:00
21bd1bd050 Always print header, even if no processes 2013-08-06 14:56:10 -04:00
cafdfce4f0 Add nilmrun-ps, -kill, and -run commands 2013-08-06 14:38:43 -04:00
477c27a4e6 Clean up temp dirs and processes at exit 2013-07-30 20:15:01 -04:00
bed26e099b Put each code in its own dir; save args too 2013-07-30 20:14:48 -04:00
9224566f9b More robust process killing 2013-07-30 20:13:30 -04:00
a8ecad9329 Use NilmDB serializer for ProcessManager 2013-07-24 14:55:59 -04:00
5b878378f3 Translate UTF-8 in command output more robustly 2013-07-22 13:03:09 -04:00
5cd38f1ba9 Don't spin so fast in tests while waiting 2013-07-21 19:49:44 -04:00
d7551bde0b Make 'args' optional to /run/code 2013-07-21 19:49:30 -04:00
40fd377a38 Remove 'name' from spawned processes 2013-07-21 19:49:15 -04:00
6e7f3ac704 Remove nilm-trainola script 2013-07-18 12:28:32 -04:00
29adb47a33 Fix test order 2013-07-18 11:01:27 -04:00
7c605a469a Cleanup dependencies 2013-07-18 11:00:53 -04:00
f5225f88f9 Add max CPU percentage 2013-07-17 18:48:55 -04:00
32e59310ef Fix for dead processes 2013-07-17 18:19:52 -04:00
5a33ef48cc Add /process/info request 2013-07-17 18:12:44 -04:00
18a5cd6334 Improve boolean parameter parsing 2013-07-15 14:39:28 -04:00
7ec4d60d38 Fix WSGI docs 2013-07-11 16:36:18 -04:00
b2bdf784ac Make test URLs relative 2013-07-11 11:46:02 -04:00
e0709f0d17 Remove multiprocessing due to mod_wsgi incompatibility; use subprocess
Multiprocessing and Apache's mod_wsgi don't play nicely.  Switch to
manually managing processes via subprocess.Popen etc instead.  When
running arbitrary code, we write it to an external file, and running
functions directly is no longer supported.
2013-07-11 11:39:22 -04:00
18d3cff772 Update WSGI docs 2013-07-10 14:16:35 -04:00
a7b9656916 Remove parameters from status output 2013-07-10 11:35:17 -04:00
2e9ec63675 Don't catch SystemExit from a subprocess 2013-07-09 13:15:27 -04:00
6d295b840a Delete trainola; it will live in nilmtools repo 2013-07-08 11:57:45 -04:00
74a05d05d6 Clear out traceback object to avoid reference cycles 2013-07-08 11:44:19 -04:00
35b20c90a5 Rename and reorganize stuff 2013-07-08 11:33:27 -04:00
27 changed files with 3143 additions and 1590 deletions

.gitattributes (2 changed lines)

@@ -1 +1 @@
-src/_version.py export-subst
+nilmrun/_version.py export-subst

.gitignore (2 changed lines)

@@ -3,8 +3,8 @@ build/
 *.pyc
 dist/
 nilmrun.egg-info/
+.eggs/
 # This gets generated as needed by setup.py
-MANIFEST.in
 MANIFEST

MANIFEST.in (new file, 8 lines)

@@ -0,0 +1,8 @@
# Root
include README.md
include setup.py
include versioneer.py
include Makefile
# Version
include nilmrun/_version.py

Makefile

@@ -1,48 +1,49 @@
 # By default, run the tests.
 all: test
-test2:
-	nilmrun/trainola.py data.js
 version:
-	python setup.py version
+	python3 setup.py version
 build:
-	python setup.py build_ext --inplace
+	python3 setup.py build_ext --inplace
 dist: sdist
 sdist:
-	python setup.py sdist
+	python3 setup.py sdist
 install:
-	python setup.py install
+	python3 setup.py install
 develop:
-	python setup.py develop
+	python3 setup.py develop
 docs:
 	make -C docs
+ctrl: flake
+flake:
+	flake8 nilmrun
 lint:
-	pylint --rcfile=.pylintrc nilmdb
+	pylint3 --rcfile=setup.cfg nilmrun
 test:
-ifeq ($(INSIDE_EMACS), t)
+ifneq ($(INSIDE_EMACS),)
 	# Use the slightly more flexible script
-	python setup.py build_ext --inplace
-	python tests/runtests.py
+	python3 setup.py build_ext --inplace
+	python3 tests/runtests.py
 else
 	# Let setup.py check dependencies, build stuff, and run the test
-	python setup.py nosetests
+	python3 setup.py nosetests
 endif
 clean::
+	find . -name '*.pyc' -o -name '__pycache__' -print0 | xargs -0 rm -rf
 	rm -f .coverage
-	find . -name '*pyc' | xargs rm -f
-	rm -rf nilmtools.egg-info/ build/ MANIFEST.in
+	rm -rf nilmrun.egg-info/ build/
 	make -C docs clean
 gitclean::
 	git clean -dXf
-.PHONY: all version dist sdist install docs lint test clean gitclean
+.PHONY: all version dist sdist install docs test
+.PHONY: ctrl lint flake clean gitclean

README.md (new file, 32 lines)

@@ -0,0 +1,32 @@
# nilmrun: Run NilmDB filters
by Jim Paris <jim@jtan.com>
## Prerequisites:
# Runtime and build environments
sudo apt-get install python3
# Create a new Python virtual environment to isolate deps.
python3 -m venv ../venv
source ../venv/bin/activate # run "deactivate" to leave
# Install all Python dependencies
pip3 install -r requirements.txt
## Install:
Install it into the virtual environment
python3 setup.py install
If you want to instead install it system-wide, you will also need to
install the requirements system-wide:
sudo pip3 install -r requirements.txt
sudo python3 setup.py install
## Usage:
nilmrun-server --help
See docs/wsgi.md for info on setting up a WSGI application in Apache.
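As a quick check that an installed server is reachable, the /version endpoint (defined in nilmrun/server.py later in this diff) can be queried. A minimal sketch, assuming the default 127.0.0.1:8080 bind address:

```python
# Minimal check against a locally running nilmrun-server.
# The address below is the default; adjust it if the server was
# started with different host/port options.
import json
import urllib.request

with urllib.request.urlopen("http://127.0.0.1:8080/version") as resp:
    print("nilmrun version:", json.loads(resp.read().decode()))
```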

README (deleted)

@@ -1,22 +0,0 @@
nilmrun: Run NilmDB filters
by Jim Paris <jim@jtan.com>
Prerequisites:
# Runtime and build environments
sudo apt-get install python2.7 python-setuptools
# Base dependencies
sudo apt-get install python-numpy python-scipy
nilmdb (1.8.0+)
Install:
python setup.py install
Usage:
nilmrun-server --help
See docs/wsgi.md for info on setting up a WSGI application in Apache.

docs/wsgi.md

@@ -21,13 +21,13 @@ arbitrary commands.
 SSLEngine On
 WSGIScriptAlias /nilmrun /home/nilm/nilmrun.wsgi
-WSGIApplicationGroup nilmrun-appgroup
-WSGIProcessGroup nilmrun-procgroup
 WSGIDaemonProcess nilmrun-procgroup threads=32 user=nilm group=nilm
-# Access control example:
 <Location /nilmrun>
+WSGIProcessGroup nilmrun-procgroup
+WSGIApplicationGroup nilmrun-appgroup
 SSLRequireSSL
+# Access control example:
 Order deny,allow
 Deny from all
 Allow from 1.2.3.4

nilmrun/__init__.py

@@ -1,5 +1,3 @@
-import nilmrun.processmanager
-
 from ._version import get_versions
 __version__ = get_versions()['version']
 del get_versions

nilmrun/_version.py

@@ -1,197 +1,520 @@
IN_LONG_VERSION_PY = True
# This file helps to compute a version number in source trees obtained from # This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag # git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (build by setup.py sdist) and build # feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file # directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number. # that just contains the computed version number.
# This file is released into the public domain. Generated by # This file is released into the public domain. Generated by
# versioneer-0.7+ (https://github.com/warner/python-versioneer) # versioneer-0.18 (https://github.com/warner/python-versioneer)
# these strings will be replaced by git during git-archive
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess import subprocess
import sys import sys
def run_command(args, cwd=None, verbose=False):
try: def get_keywords():
# remember shell=False, so use git.cmd on windows, not just git """Get the keywords needed to look up the version information."""
p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd) # these strings will be replaced by git during git-archive.
except EnvironmentError: # setup.py/versioneer.py will grep for the variable names, so they must
e = sys.exc_info()[1] # each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = "nilmrun-"
cfg.parentdir_prefix = "nilmrun-"
cfg.versionfile_source = "nilmrun/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose: if verbose:
print("unable to run %s" % args[0]) print("unable to find command, tried %s" % (commands,))
print(e) return None, None
return None
stdout = p.communicate()[0].strip() stdout = p.communicate()[0].strip()
if sys.version >= '3': if sys.version_info[0] >= 3:
stdout = stdout.decode() stdout = stdout.decode()
if p.returncode != 0: if p.returncode != 0:
if verbose: if verbose:
print("unable to run %s (error)" % args[0]) print("unable to run %s (error)" % dispcmd)
return None print("stdout was %s" % stdout)
return stdout return None, p.returncode
return stdout, p.returncode
import sys def versions_from_parentdir(parentdir_prefix, root, verbose):
import re """Try to determine the version from the parent directory name.
import os.path
def get_expanded_variables(versionfile_source): Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these # the code embedded in _version.py can just fetch the value of these
# variables. When used from setup.py, we don't want to import # keywords. When used from setup.py, we don't want to import _version.py,
# _version.py, so we do it with a regexp instead. This function is not # so we do it with a regexp instead. This function is not used from
# used from _version.py. # _version.py.
variables = {} keywords = {}
try: try:
for line in open(versionfile_source,"r").readlines(): f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="): if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line) mo = re.search(r'=\s*"(.*)"', line)
if mo: if mo:
variables["refnames"] = mo.group(1) keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="): if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line) mo = re.search(r'=\s*"(.*)"', line)
if mo: if mo:
variables["full"] = mo.group(1) keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError: except EnvironmentError:
pass pass
return variables return keywords
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
refnames = variables["refnames"].strip() @register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"): if refnames.startswith("$Format"):
if verbose: if verbose:
print("variables are unexpanded, not using") print("keywords are unexpanded, not using")
return {} # unexpanded, so not in an unpacked git-archive tarball raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")]) refs = set([r.strip() for r in refnames.strip("()").split(",")])
for ref in list(refs): # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
if not re.search(r'\d', ref): # just "foo-1.0". If we see a "tag: " prefix, prefer those.
if verbose: TAG = "tag: "
print("discarding '%s', no digits" % ref) tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
refs.discard(ref) if not tags:
# Assume all version tags have a digit. git's %d expansion # Either we're using git < 1.8.3, or there really are no tags. We use
# behaves like git log --decorate=short and strips out the # a heuristic: assume all version tags have a digit. The old git %d
# refs/heads/ and refs/tags/ prefixes that would let us # expansion behaves like git log --decorate=short and strips out the
# distinguish between branches and tags. By ignoring refnames # refs/heads/ and refs/tags/ prefixes that would let us distinguish
# without digits, we filter out many common branch names like # between branches and tags. By ignoring refnames without digits, we
# "release" and "stabilization", as well as "HEAD" and "master". # filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose: if verbose:
print("remaining refs: %s" % ",".join(sorted(refs))) print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(refs): for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1" # sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix): if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):] r = ref[len(tag_prefix):]
if verbose: if verbose:
print("picking %s" % r) print("picking %s" % r)
return { "version": r, return {"version": r,
"full": variables["full"].strip() } "full-revisionid": keywords["full"].strip(),
# no suitable tags, so we use the full revision id "dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose: if verbose:
print("no suitable tags, using full revision id") print("no suitable tags, using unknown + full revision id")
return { "version": variables["full"].strip(), return {"version": "0+unknown",
"full": variables["full"].strip() } "full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
# this runs 'git' from the root of the source tree. That either means @register_vcs_handler("git", "pieces_from_vcs")
# someone ran a setup.py command (and this code is in versioneer.py, so def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# IN_LONG_VERSION_PY=False, thus the containing directory is the root of """Get version from 'git describe' in the root of the source tree.
# the source tree), or someone ran a project-specific entry point (and
# this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the This only gets called if the git-archive 'subst' keywords were *not*
# containing directory is somewhere deeper in the source tree). This only expanded, and _version.py hasn't already been rewritten with a short
# gets called if the git-archive 'subst' variables were *not* expanded, version string, meaning we're inside a checked out source tree.
# and _version.py hasn't already been rewritten with a short version """
# string, meaning we're inside a checked out source tree. GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try: try:
here = os.path.abspath(__file__) return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
except NameError: verbose)
# some py2exe/bbfreeze/non-CPython implementations don't do __file__ except NotThisMethod:
return {} # not always correct pass
# versionfile_source is the relative path from the top of the source tree try:
# (where the .git directory might live) to this file. Invert this to find root = os.path.realpath(__file__)
# the root from __file__.
root = here
if IN_LONG_VERSION_PY:
for i in range(len(versionfile_source.split("/"))):
root = os.path.dirname(root)
else:
root = os.path.dirname(here)
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
return {}
GIT = "git"
if sys.platform == "win32":
GIT = "git.cmd"
stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
cwd=root)
if stdout is None:
return {}
if not stdout.startswith(tag_prefix):
if verbose:
print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
return {}
tag = stdout[len(tag_prefix):]
stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
if stdout is None:
return {}
full = stdout.strip()
if tag.endswith("-dirty"):
full += "-dirty"
return {"version": tag, "full": full}
def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
if IN_LONG_VERSION_PY:
# We're running from _version.py. If it's from a source tree
# (execute-in-place), we can work upwards to find the root of the
# tree, and then check the parent directory for a version string. If
# it's in an installed application, there's no hope.
try:
here = os.path.abspath(__file__)
except NameError:
# py2exe/bbfreeze/non-CPython don't have __file__
return {} # without __file__, we have no hope
# versionfile_source is the relative path from the top of the source # versionfile_source is the relative path from the top of the source
# tree to _version.py. Invert this to find the root from __file__. # tree (where the .git directory might live) to this file. Invert
root = here # this to find the root from __file__.
for i in range(len(versionfile_source.split("/"))): for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root) root = os.path.dirname(root)
else: except NameError:
# we're running from versioneer.py, which means we're running from return {"version": "0+unknown", "full-revisionid": None,
# the setup.py in a source tree. sys.argv[0] is setup.py in the root. "dirty": None,
here = os.path.abspath(sys.argv[0]) "error": "unable to find root of source tree",
root = os.path.dirname(here) "date": None}
# Source tarballs conventionally unpack into a directory that includes try:
# both the project name and a version string. pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
dirname = os.path.basename(root) return render(pieces, cfg.style)
if not dirname.startswith(parentdir_prefix): except NotThisMethod:
if verbose: pass
print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
(root, dirname, parentdir_prefix))
return None
return {"version": dirname[len(parentdir_prefix):], "full": ""}
tag_prefix = "nilmrun-" try:
parentdir_prefix = "nilmrun-" if cfg.parentdir_prefix:
versionfile_source = "nilmrun/_version.py" return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
def get_versions(default={"version": "unknown", "full": ""}, verbose=False): pass
variables = { "refnames": git_refnames, "full": git_full }
ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
if not ver:
ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
if not ver:
ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
verbose)
if not ver:
ver = default
return ver
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}

nilmrun/filters/__init__.py (deleted)

@@ -1 +0,0 @@
# Filters

nilmrun/filters/dummy.py (deleted)

@@ -1,22 +0,0 @@
#!/usr/bin/python
from nilmdb.utils.printf import *
import time
import signal
import sys
# This is just for testing the process management.
def filterfunc(n):
n = int(n)
if n < 0: # raise an exception
raise Exception("test exception")
if n == 0: # ignore SIGTERM and count to 100
n = 100
signal.signal(signal.SIGTERM, signal.SIG_IGN)
for x in range(n):
s = sprintf("dummy %d\n", x)
if x & 1:
sys.stdout.write(s)
else:
sys.stderr.write(s)
time.sleep(0.1)

nilmrun/filters/trainola.py (deleted)

@@ -1,260 +0,0 @@
#!/usr/bin/python
from nilmdb.utils.printf import *
import nilmdb.client
import nilmtools.filter
from nilmdb.utils.time import (timestamp_to_human,
timestamp_to_seconds,
seconds_to_timestamp)
from nilmdb.utils.interval import Interval
import numpy as np
import scipy
import scipy.signal
from numpy.core.umath_tests import inner1d
import nilmrun
from collections import OrderedDict
class DataError(ValueError):
pass
class Data(object):
def __init__(self, name, url, stream, start, end, columns):
"""Initialize, get stream info, check columns"""
self.name = name
self.url = url
self.stream = stream
self.start = start
self.end = end
# Get stream info
self.client = nilmdb.client.numpyclient.NumpyClient(url)
self.info = nilmtools.filter.get_stream_info(self.client, stream)
# Build up name => index mapping for the columns
self.columns = OrderedDict()
for c in columns:
if (c['name'] in self.columns.keys() or
c['index'] in self.columns.values()):
raise DataError("duplicated columns")
if (c['index'] < 0 or c['index'] >= self.info.layout_count):
raise DataError("bad column number")
self.columns[c['name']] = c['index']
if not len(self.columns):
raise DataError("no columns")
# Count points
self.count = self.client.stream_count(self.stream, self.start, self.end)
def __str__(self):
return sprintf("%-20s: %s%s, %s rows",
self.name, self.stream, str(self.columns.keys()),
self.count)
def fetch(self, min_rows = 10, max_rows = 100000):
"""Fetch all the data into self.data. This is intended for
exemplars, and can only handle a relatively small number of
rows"""
# Verify count
if self.count == 0:
raise DataError("No data in this exemplar!")
if self.count < min_rows:
raise DataError("Too few data points: " + str(self.count))
if self.count > max_rows:
raise DataError("Too many data points: " + str(self.count))
# Extract the data
datagen = self.client.stream_extract_numpy(self.stream,
self.start, self.end,
self.info.layout,
maxrows = self.count)
self.data = list(datagen)[0]
# Discard timestamp
self.data = self.data[:,1:]
# Subtract the mean from each column
self.data = self.data - self.data.mean(axis=0)
# Get scale factors for each column by computing dot product
# of each column with itself.
self.scale = inner1d(self.data.T, self.data.T)
# Ensure a minimum (nonzero) scale and convert to list
self.scale = np.maximum(self.scale, [1e-9]).tolist()
def process(main, function, args = None, rows = 200000):
"""Process through the data; similar to nilmtools.Filter.process_numpy"""
if args is None:
args = []
extractor = main.client.stream_extract_numpy
old_array = np.array([])
for new_array in extractor(main.stream, main.start, main.end,
layout = main.info.layout, maxrows = rows):
# If we still had old data left, combine it
if old_array.shape[0] != 0:
array = np.vstack((old_array, new_array))
else:
array = new_array
# Process it
processed = function(array, args)
# Save the unprocessed parts
if processed >= 0:
old_array = array[processed:]
else:
raise Exception(sprintf("%s return value %s must be >= 0",
str(function), str(processed)))
# Warn if there's too much data remaining
if old_array.shape[0] > 3 * rows:
printf("warning: %d unprocessed rows in buffer\n",
old_array.shape[0])
# Handle leftover data
if old_array.shape[0] != 0:
processed = function(array, args)
def peak_detect(data, delta):
"""Simple min/max peak detection algorithm, taken from my code
in the disagg.m from the 10-8-5 paper"""
mins = [];
maxs = [];
cur_min = (None, np.inf)
cur_max = (None, -np.inf)
lookformax = False
for (n, p) in enumerate(data):
if p > cur_max[1]:
cur_max = (n, p)
if p < cur_min[1]:
cur_min = (n, p)
if lookformax:
if p < (cur_max[1] - delta):
maxs.append(cur_max)
cur_min = (n, p)
lookformax = False
else:
if p > (cur_min[1] + delta):
mins.append(cur_min)
cur_max = (n, p)
lookformax = True
return (mins, maxs)
def match(data, args):
"""Perform cross-correlation match"""
( columns, exemplars ) = args
nrows = data.shape[0]
# We want at least 10% more points than the widest exemplar.
widest = max([ x.count for x in exemplars ])
if (widest * 1.1) > nrows:
return 0
# This is how many points we'll consider valid in the
# cross-correlation.
valid = nrows + 1 - widest
matches = []
# Try matching against each of the exemplars
for e_num, e in enumerate(exemplars):
corrs = []
# Compute cross-correlation for each column
for c in e.columns:
a = data[:,columns[c] + 1]
b = e.data[:,e.columns[c]]
corr = scipy.signal.fftconvolve(a, np.flipud(b), 'valid')[0:valid]
# Scale by the norm of the exemplar
corr = corr / e.scale[columns[c]]
corrs.append(corr)
# Find the peaks using the column with the largest amplitude
biggest = e.scale.index(max(e.scale))
peaks_minmax = peak_detect(corrs[biggest], 0.1)
peaks = [ p[0] for p in peaks_minmax[1] ]
# Now look at every peak
for p in peaks:
# Correlation for each column must be close enough to 1.
for (corr, scale) in zip(corrs, e.scale):
# The accepted distance from 1 is based on the relative
# amplitude of the column. Use a linear mapping:
# scale 1.0 -> distance 0.1
# scale 0.0 -> distance 1.0
distance = 1 - 0.9 * (scale / e.scale[biggest])
if abs(corr[p] - 1) > distance:
# No match
break
else:
# Successful match
matches.append((p, e_num))
# Print matches
for (point, e_num) in sorted(matches):
# Ignore matches that showed up at the very tail of the window,
# and shorten the window accordingly. This is an attempt to avoid
# problems at chunk boundaries.
if point > (valid - 50):
valid -= 50
break
print "matched", data[point,0], "exemplar", exemplars[e_num].name
#from matplotlib import pyplot as p
#p.plot(data[:,1:3])
#p.show()
return max(valid, 0)
def trainola(conf):
# Load main stream data
print "Loading stream data"
main = Data(None, conf['url'], conf['stream'],
conf['start'], conf['end'], conf['columns'])
# Pull in the exemplar data
exemplars = []
for n, e in enumerate(conf['exemplars']):
print sprintf("Loading exemplar %d: %s", n, e['name'])
ex = Data(e['name'], e['url'], e['stream'],
e['start'], e['end'], e['columns'])
ex.fetch()
exemplars.append(ex)
# Verify that the exemplar columns are all represented in the main data
for n, ex in enumerate(exemplars):
for col in ex.columns:
if col not in main.columns:
raise DataError(sprintf("Exemplar %d column %s is not "
"available in main data", n, col))
# Process the main data
process(main, match, (main.columns, exemplars))
return "done"
filterfunc = trainola
def main(argv = None):
import simplejson as json
import argparse
import sys
parser = argparse.ArgumentParser(
formatter_class = argparse.RawDescriptionHelpFormatter,
version = nilmrun.__version__,
description = """Run Trainola using parameters passed in as
JSON-formatted data.""")
parser.add_argument("file", metavar="FILE", nargs="?",
type=argparse.FileType('r'), default=sys.stdin)
args = parser.parse_args(argv)
conf = json.loads(args.file.read())
result = trainola(conf)
print json.dumps(result, sort_keys = True, indent = 2 * ' ')
if __name__ == "__main__":
main()

nilmrun/processmanager.py

@@ -1,26 +1,30 @@
#!/usr/bin/python #!/usr/bin/env python3
from nilmdb.utils.printf import *
import threading import threading
import multiprocessing import subprocess
import cStringIO import io
import sys import sys
import os import os
import signal import signal
import time import time
import uuid import uuid
import psutil import psutil
import imp import tempfile
import traceback import atexit
import shutil
class ProcessError(Exception):
pass
class LogReceiver(object): class LogReceiver(object):
"""Spawn a thread that listens to a pipe for log messages, """Spawn a thread that listens to a pipe for log messages,
and stores them locally.""" and stores them locally."""
def __init__(self, pipe): def __init__(self, pipe):
self.pipe = pipe self.pipe = pipe
self.log = cStringIO.StringIO() self.log = io.BytesIO()
self.thread = threading.Thread(target = self.run) self.thread = threading.Thread(target=self.run)
self.thread.start() self.thread.start()
def run(self): def run(self):
@ -35,110 +39,90 @@ class LogReceiver(object):
return self.log.getvalue() return self.log.getvalue()
def clear(self): def clear(self):
self.log = cStringIO.StringIO() self.log = io.BytesIO()
class Process(object): class Process(object):
"""Spawn and manage a process that calls a Python function""" """Spawn and manage a subprocess, and capture its output."""
def __init__(self, name, function, parameters): def __init__(self, argv, tempfile=None):
self.parameters = parameters
self.start_time = None self.start_time = None
self.name = name
# Use a pipe for communicating log data # Use a pipe for communicating log data
(rpipe, wpipe) = os.pipe() (rpipe, wpipe) = os.pipe()
self._log = LogReceiver(rpipe) self._log = LogReceiver(rpipe)
# Start the function in a new process # Stdin is null
self._process = multiprocessing.Process( nullfd = os.open(os.devnull, os.O_RDONLY)
target = self._trampoline, name = name,
args = (function, rpipe, wpipe, parameters))
self._process.daemon = True
self._process.start()
# Close the writer end of the pipe, get process info # Spawn the new process
os.close(wpipe) try:
self._process = subprocess.Popen(args=argv, stdin=nullfd,
stdout=wpipe, stderr=wpipe,
close_fds=True, cwd="/tmp")
except (OSError, TypeError) as e:
raise ProcessError(str(e))
finally:
# Close the FDs we don't need
os.close(wpipe)
os.close(nullfd)
# Get process info
self.start_time = time.time() self.start_time = time.time()
self.pid = str(uuid.uuid1(self._process.pid or 0)) self.pid = str(uuid.uuid1(self._process.pid or 0))
def _trampoline(self, func, rpipe, wpipe, param): # pragma: no cover def _join(self, timeout=1.0):
# No coverage report for this, because it's executed in a subprocess start = time.time()
"""Trampoline function to set up stdio and call the real function.""" while True:
# Close the reader end of the pipe if self._process.poll() is not None:
os.close(rpipe) return True
if (time.time() - start) >= timeout:
return False
time.sleep(0.1)
# Like os.close() but ignores errors def terminate(self, timeout=1.0):
def tryclose(fd):
try:
os.close(fd)
except OSError:
pass
# Remap stdio to go to the pipe. We do this at the OS level,
# replacing FDs, so that future spawned processes do the right thing.
# stdin
sys.stdin.close()
tryclose(0)
fd = os.open(os.devnull, os.O_RDONLY) # 0
sys.stdin = os.fdopen(fd, 'r', 0)
# stdout
sys.stdout.close()
tryclose(1)
fd = os.dup(wpipe) # 1
sys.stdout = os.fdopen(fd, 'w', 0)
# stdout
sys.stderr.close()
tryclose(2)
fd = os.dup(wpipe) # 2
sys.stderr = os.fdopen(fd, 'w', 0)
# Don't need this extra fd
os.close(wpipe)
# Ready to go -- call the function, exit when it's done
func(param)
sys.exit(0)
def terminate(self, timeout = 1.0):
"""Terminate a process, and all of its children that are in the same """Terminate a process, and all of its children that are in the same
process group.""" process group."""
# First give it some time to die on its own try:
self._process.join(timeout) # First give it some time to die on its own
if not self.alive: if self._join(timeout):
return True
def getpgid(pid):
try:
return os.getpgid(pid)
except OSError: # pragma: no cover
return None
def kill(pid, sig):
try:
return os.kill(pid, sig)
except OSError: # pragma: no cover
return
# Find all children
group = getpgid(self._process.pid)
main = psutil.Process(self._process.pid)
allproc = [main] + main.children(recursive=True)
# Kill with SIGTERM, if they're still in this process group
for proc in allproc:
if getpgid(proc.pid) == group:
kill(proc.pid, signal.SIGTERM)
# Wait for it to die again
if self._join(timeout):
return True
# One more try with SIGKILL
for proc in allproc:
if getpgid(proc.pid) == group:
kill(proc.pid, signal.SIGKILL)
# See if it worked
return self._join(timeout)
except psutil.Error: # pragma: no cover (race condition)
return True return True
def getpgid(pid):
try:
return os.getpgid(pid)
except OSError: # pragma: no cover
return None
# Find all children
group = getpgid(self._process.pid)
main = psutil.Process(self._process.pid)
allproc = [ main ] + main.get_children(recursive = True)
# Kill with SIGTERM, if they're still in this process group
for proc in allproc:
if getpgid(proc.pid) == group:
os.kill(proc.pid, signal.SIGTERM)
# Wait for it to die again
self._process.join(timeout)
if not self.alive:
return True
# One more try with SIGKILL
for proc in allproc:
if getpgid(proc.pid) == group:
os.kill(proc.pid, signal.SIGKILL)
# See if it worked
self._process.join(timeout)
return not self.alive
def clear_log(self): def clear_log(self):
self._log.clear() self._log.clear()
@ -148,95 +132,163 @@ class Process(object):
@property @property
def alive(self): def alive(self):
return self._process.is_alive() return self._process.poll() is None
@property @property
def exitcode(self): def exitcode(self):
return self._process.exitcode return self._process.returncode
def _exec_user_code(codeargs): # pragma: no cover (runs in subprocess) def get_info_prepare(self):
"""Execute 'code' as if it were placed into a file and executed""" """Prepare the process list and measurement for .get_info.
(code, args) = codeargs Call .get_info() about a second later."""
# This is split off into a separate function because the Python3 try:
# syntax of "exec" triggers a SyntaxError in Python2, if it's within main = psutil.Process(self._process.pid)
# a nested function. self._process_list = [main] + main.children(recursive=True)
imp.acquire_lock() for proc in self._process_list:
try: proc.cpu_percent(0)
module = imp.new_module("__main__") except psutil.Error: # pragma: no cover (race condition)
finally: self._process_list = []
imp.release_lock()
module.__file__ = "<user-code>"
sys.argv = [''] + args
# Wrap the compile and exec in a try/except so we can format the
# exception more nicely.
try:
codeobj = compile(code, '<user-code>', 'exec',
flags = 0, dont_inherit = 1)
exec(codeobj, module.__dict__, {})
except:
# Pull out the exception
info = sys.exc_info()
tblist = traceback.extract_tb(info[2])
# First entry is probably this code; get rid of it @staticmethod
if len(tblist) and tblist[0][2] == '_exec_user_code': def get_empty_info():
tblist = tblist[1:] return {"cpu_percent": 0,
"cpu_user": 0,
"cpu_sys": 0,
"mem_phys": 0,
"mem_virt": 0,
"io_read": 0,
"io_write": 0,
"procs": 0}
# Add the user's source code to every line that's missing it def get_info(self):
lines = code.splitlines() """Return a dictionary with info about the process CPU and memory
for (n, (name, line, func, text)) in enumerate(tblist): usage. Call .get_info_prepare() about a second before this."""
if name == '<user-code>' and text is None and line <= len(lines): d = self.get_empty_info()
tblist[n] = (name, line, func, lines[line-1].strip()) for proc in self._process_list:
try:
d["cpu_percent"] += proc.cpu_percent(0)
cpuinfo = proc.cpu_times()
d["cpu_user"] += cpuinfo.user
d["cpu_sys"] += cpuinfo.system
meminfo = proc.memory_info()
d["mem_phys"] += meminfo.rss
d["mem_virt"] += meminfo.vms
ioinfo = proc.io_counters()
d["io_read"] += ioinfo.read_bytes
d["io_write"] += ioinfo.write_bytes
d["procs"] += 1
except psutil.Error:
pass
return d
# Print it to stderr in the usual format
out = ['Traceback (most recent call last):\n']
out.extend(traceback.format_list(tblist))
out.extend(traceback.format_exception_only(info[0], info[1]))
sys.stderr.write("".join(out))
sys.stderr.flush()
sys.exit(1)
sys.exit(0)
class ProcessManager(object): class ProcessManager(object):
"""Track and manage a collection of Process objects""" """Track and manage a collection of Process objects"""
def __init__(self): def __init__(self):
self.processes = {} self.processes = {}
self.tmpdirs = {}
atexit.register(self._atexit)
def _cleanup_tmpdir(self, pid):
if pid in self.tmpdirs:
try:
shutil.rmtree(self.tmpdirs[pid])
except OSError: # pragma: no cover
pass
del self.tmpdirs[pid]
def _atexit(self):
# Kill remaining processes, remove their dirs
for pid in list(self.processes.keys()):
try:
self.processes[pid].terminate()
del self.processes[pid]
shutil.rmtree(self.tmpdirs[pid])
del self.tmpdirs[pid]
except Exception: # pragma: no cover
pass
def __iter__(self): def __iter__(self):
return iter(self.processes.keys()) return iter(list(self.processes.keys()))
def __getitem__(self, key): def __getitem__(self, key):
return self.processes[key] return self.processes[key]
def run_function(self, procname, function, parameters): def run_code(self, code, args):
"""Run a Python function that already exists""" """Evaluate 'code' as if it were placed into a Python file and
new = Process(procname, function, parameters) executed. The arguments, which must be strings, will be
accessible in the code as sys.argv[1:]."""
# The easiest way to do this, by far, is to just write the
# code to a file. Make a directory to put it in.
tmpdir = tempfile.mkdtemp(prefix="nilmrun-usercode-")
try:
# Write the code
codepath = os.path.join(tmpdir, "usercode.py")
with open(codepath, "w") as f:
f.write(code)
# Save the args too, for debugging purposes
with open(os.path.join(tmpdir, "args.txt"), "w") as f:
f.write(repr(args))
# Run the code
argv = [sys.executable, "-B", "-s", "-u", codepath] + args
pid = self.run_command(argv)
# Save the temp dir
self.tmpdirs[pid] = tmpdir
tmpdir = None # Don't need to remove it anymore
return pid
finally:
# Clean up tempdir if we didn't finish
if tmpdir is not None:
try:
shutil.rmtree(tmpdir)
except OSError: # pragma: no cover
pass
def run_command(self, argv):
"""Execute a command line program"""
new = Process(argv)
self.processes[new.pid] = new self.processes[new.pid] = new
return new.pid return new.pid
def run_code(self, procname, code, args):
"""Evaluate 'code' as if it were placed into a Python file and
executed. The arguments will be accessible in the code as
sys.argv[1:]."""
return self.run_function(procname, _exec_user_code, (code, args))
def run_command(self, procname, argv):
"""Execute a command line program"""
def spwan_user_command(argv): # pragma: no cover (runs in subprocess)
try:
maxfd = os.sysconf("SC_OPEN_MAX")
except Exception:
maxfd = 256
os.closerange(3, maxfd)
try:
os.chdir("/tmp")
except OSError:
pass
os.execvp(argv[0], argv)
return self.run_function(procname, spwan_user_command, argv)
def terminate(self, pid): def terminate(self, pid):
return self.processes[pid].terminate() return self.processes[pid].terminate()
def remove(self, pid): def remove(self, pid):
self._cleanup_tmpdir(pid)
del self.processes[pid] del self.processes[pid]
def get_info(self):
"""Get info about all running PIDs"""
info = {
"total": Process.get_empty_info(),
"pids": {},
"system": {}
}
# Trigger CPU usage collection
for pid in self:
self[pid].get_info_prepare()
psutil.cpu_percent(0, percpu=True)
# Give it some time
time.sleep(1)
# Retrieve info for system
info["system"]["cpu_percent"] = sum(psutil.cpu_percent(0, percpu=True))
info["system"]["cpu_max"] = 100.0 * psutil.cpu_count()
info["system"]["procs"] = len(psutil.pids())
meminfo = psutil.virtual_memory()
info["system"]["mem_total"] = meminfo.total
info["system"]["mem_used"] = meminfo.used
# Retrieve info for each PID
for pid in self:
info["pids"][pid] = self[pid].get_info()
# Update totals
for key in info["total"]:
info["total"][key] += info["pids"][pid][key]
return info
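A minimal sketch of driving the new ProcessManager directly, mirroring what the HTTP layer in nilmrun/server.py below does; the commands, code string, and polling interval are illustrative only, and the serializer_proxy wrapping used by the server is omitted here.

```python
# Illustrative use of the ProcessManager API shown above; the commands,
# code string, and sleep interval are examples only.
import time
from nilmrun.processmanager import ProcessManager

manager = ProcessManager()

# Run an external command; run_command() returns an opaque pid string.
pid = manager.run_command(["/bin/sh", "-c", "echo hello; sleep 2"])

# Or run a snippet of Python code; args end up in sys.argv[1:].
pid2 = manager.run_code("import sys; print(sys.argv[1:])", ["a", "b"])

# Poll until the first process exits, then read its captured output.
while manager[pid].alive:
    time.sleep(0.5)
print(manager[pid].log.decode("utf-8", errors="replace"))
print("exit code:", manager[pid].exitcode)

# Sample CPU/memory usage for the system and all managed processes
# (this takes about a second).
print(manager.get_info()["system"])

# Kill anything still running and drop both processes from the manager.
for p in (pid, pid2):
    manager.terminate(p)
    manager.remove(p)
```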

nilmrun/server.py

@@ -1,36 +1,28 @@
"""CherryPy-based server for running NILM filters via HTTP""" """CherryPy-based server for running NILM filters via HTTP"""
import cherrypy import cherrypy
import sys
import os import os
import socket import socket
import simplejson as json
import decorator
import psutil
import traceback import traceback
import argparse
import time
import nilmdb from nilmdb.utils.printf import sprintf
from nilmdb.utils.printf import *
from nilmdb.server.serverutil import ( from nilmdb.server.serverutil import (
chunked_response,
response_type,
workaround_cp_bug_1200,
exception_to_httperror, exception_to_httperror,
CORS_allow, CORS_allow,
json_to_request_params, json_to_request_params,
json_error_page, json_error_page,
cherrypy_start, cherrypy_start,
cherrypy_stop, cherrypy_stop,
bool_param,
) )
from nilmdb.utils import serializer_proxy
import nilmrun import nilmrun
import nilmrun.filters.trainola import nilmrun.processmanager
import nilmrun.filters.dummy
# Add CORS_allow tool # Add CORS_allow tool
cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow) cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
# CherryPy apps # CherryPy apps
class App(object): class App(object):
"""Root application for NILM runner""" """Root application for NILM runner"""
@ -57,28 +49,32 @@ class App(object):
def version(self): def version(self):
return nilmrun.__version__ return nilmrun.__version__
class AppProcess(object): class AppProcess(object):
def __init__(self, manager): def __init__(self, manager):
self.manager = manager self.manager = manager
def process_status(self, pid): def process_status(self, pid):
# We need to convert the log (which is bytes) to Unicode
# characters, in order to send it via JSON. Treat it as UTF-8
# but replace invalid characters with markers.
log = self.manager[pid].log.decode('utf-8', errors='replace')
return { return {
"pid": pid, "pid": pid,
"alive": self.manager[pid].alive, "alive": self.manager[pid].alive,
"exitcode": self.manager[pid].exitcode, "exitcode": self.manager[pid].exitcode,
"name": self.manager[pid].name,
"start_time": self.manager[pid].start_time, "start_time": self.manager[pid].start_time,
"parameters": self.manager[pid].parameters, "log": log
"log": self.manager[pid].log,
} }
# /process/status # /process/status
@cherrypy.expose @cherrypy.expose
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
def status(self, pid, clear = False): def status(self, pid, clear=False):
"""Return status about a process. If clear = True, also clear """Return status about a process. If clear = True, also clear
the log.""" the log."""
clear = bool_param(clear)
if pid not in self.manager: if pid not in self.manager:
raise cherrypy.HTTPError("404 Not Found", "No such PID") raise cherrypy.HTTPError("404 Not Found", "No such PID")
status = self.process_status(pid) status = self.process_status(pid)
@ -93,22 +89,31 @@ class AppProcess(object):
"""Return a list of processes in the manager.""" """Return a list of processes in the manager."""
return list(self.manager) return list(self.manager)
# /process/info
@cherrypy.expose
@cherrypy.tools.json_out()
def info(self):
"""Return detailed CPU and memory info about the system and
all processes"""
return self.manager.get_info()
# /process/remove # /process/remove
@cherrypy.expose @cherrypy.expose
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@cherrypy.tools.CORS_allow(methods = ["POST"]) @cherrypy.tools.CORS_allow(methods=["POST"])
def remove(self, pid): def remove(self, pid):
"""Remove a process from the manager, killing it if necessary.""" """Remove a process from the manager, killing it if necessary."""
if pid not in self.manager: if pid not in self.manager:
raise cherrypy.HTTPError("404 Not Found", "No such PID") raise cherrypy.HTTPError("404 Not Found", "No such PID")
if not self.manager.terminate(pid): # pragma: no cover if not self.manager.terminate(pid): # pragma: no cover
raise cherrypy.HTTPError("503 Service Unavailable", raise cherrypy.HTTPError("503 Service Unavailable",
"Failed to stop process") "Failed to stop process")
status = self.process_status(pid) status = self.process_status(pid)
self.manager.remove(pid) self.manager.remove(pid)
return status return status
class AppRun(object): class AppRun(object):
def __init__(self, manager): def __init__(self, manager):
self.manager = manager self.manager = manager
@ -117,61 +122,51 @@ class AppRun(object):
@cherrypy.expose @cherrypy.expose
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@cherrypy.tools.CORS_allow(methods = ["POST"]) @exception_to_httperror(nilmrun.processmanager.ProcessError)
@cherrypy.tools.CORS_allow(methods=["POST"])
def command(self, argv): def command(self, argv):
"""Execute an arbitrary program on the server. argv is a """Execute an arbitrary program on the server. argv is a
list of the program and its arguments: 'argv[0]' is the program list of the program and its arguments: 'argv[0]' is the program
and 'argv[1:]' are arguments""" and 'argv[1:]' are arguments"""
return self.manager.run_command("command", argv) if not isinstance(argv, list):
raise cherrypy.HTTPError("400 Bad Request",
"argv must be a list of strings")
return self.manager.run_command(argv)
# /run/code
@cherrypy.expose @cherrypy.expose
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@cherrypy.tools.CORS_allow(methods = ["POST"]) @exception_to_httperror(nilmrun.processmanager.ProcessError)
def code(self, code, args): @cherrypy.tools.CORS_allow(methods=["POST"])
def code(self, code, args=None):
"""Execute arbitrary Python code. 'code' is a formatted string. """Execute arbitrary Python code. 'code' is a formatted string.
It will be run as if it were written into a Python file and It will be run as if it were written into a Python file and
executed, with the arguments in 'args' passed on the command line executed. 'args' is a list of strings, and they are passed
(i.e., they end up in sys.argv[1:])""" on the command line as additional arguments (i.e., they end up
return self.manager.run_code("usercode", code, args) in sys.argv[1:])"""
if args is None:
args = []
if not isinstance(args, list):
raise cherrypy.HTTPError("400 Bad Request",
"args must be a list of strings")
return self.manager.run_code(code, args)
# /run/trainola
@cherrypy.expose
@cherrypy.tools.json_in()
@cherrypy.tools.json_out()
@exception_to_httperror(KeyError, ValueError)
@cherrypy.tools.CORS_allow(methods = ["POST"])
def trainola(self, data):
return self.manager.run_function(
"trainola", nilmrun.filters.trainola.filterfunc, data)
# /run/dummy
@cherrypy.expose
@cherrypy.tools.json_in()
@cherrypy.tools.json_out()
@exception_to_httperror(KeyError, ValueError)
@cherrypy.tools.CORS_allow(methods = ["POST"])
def dummy(self, data):
return self.manager.run_function(
"dummy", nilmrun.filters.dummy.filterfunc, data)
class Server(object):
-def __init__(self, host = '127.0.0.1', port = 8080,
-embedded = True, # hide diagnostics and output, etc
-force_traceback = False, # include traceback in all errors
-basepath = '', # base URL path for cherrypy.tree
+def __init__(self, host='127.0.0.1', port=8080,
+force_traceback=False, # include traceback in all errors
+basepath='', # base URL path for cherrypy.tree
):
-self.embedded = embedded
# Build up global server configuration
cherrypy.config.update({
+'environment': 'embedded',
'server.socket_host': host,
'server.socket_port': port,
'engine.autoreload_on': False,
'server.max_request_body_size': 8*1024*1024,
})
-if self.embedded:
-cherrypy.config.update({ 'environment': 'embedded' })
# Build up application specific configuration
app_config = {}
@ -180,23 +175,25 @@ class Server(object):
})
# Some default headers to just help identify that things are working
-app_config.update({ 'response.headers.X-Jim-Is-Awesome': 'yeah' })
+app_config.update({'response.headers.X-Jim-Is-Awesome': 'yeah'})
# Set up Cross-Origin Resource Sharing (CORS) handler so we
# can correctly respond to browsers' CORS preflight requests.
# This also limits verbs to GET and HEAD by default.
-app_config.update({ 'tools.CORS_allow.on': True,
-'tools.CORS_allow.methods': ['GET', 'HEAD'] })
+app_config.update({
+'tools.CORS_allow.on': True,
+'tools.CORS_allow.methods': ['GET', 'HEAD']
+})
# Configure the 'json_in' tool to also allow other content-types
# (like x-www-form-urlencoded), and to treat JSON as a dict that
# fills requests.param.
-app_config.update({ 'tools.json_in.force': False,
-'tools.json_in.processor': json_to_request_params })
+app_config.update({'tools.json_in.force': False,
+'tools.json_in.processor': json_to_request_params})
# Send tracebacks in error responses. They're hidden by the
# error_page function for client errors (code 400-499).
-app_config.update({ 'request.show_tracebacks' : True })
+app_config.update({'request.show_tracebacks': True})
self.force_traceback = force_traceback
# Patch CherryPy error handler to never pad out error messages.
@ -204,13 +201,17 @@ class Server(object):
# error messages.
cherrypy._cperror._ie_friendly_error_sizes = {}
+# The manager maintains internal state and isn't necessarily
+# thread-safe, so wrap it in the serializer.
+manager = serializer_proxy(nilmrun.processmanager.ProcessManager)()
# Build up the application and mount it
-manager = nilmrun.processmanager.ProcessManager()
+self._manager = manager
root = App()
root.process = AppProcess(manager)
root.run = AppRun(manager)
cherrypy.tree.apps = {}
-cherrypy.tree.mount(root, basepath, config = { "/" : app_config })
+cherrypy.tree.mount(root, basepath, config={"/": app_config})
# Set up the WSGI application pointer for external programs
self.wsgi_application = cherrypy.tree
@ -220,17 +221,20 @@ class Server(object):
return json_error_page(status, message, traceback, version,
self.force_traceback)
-def start(self, blocking = False, event = None):
-cherrypy_start(blocking, event, self.embedded)
+def start(self, blocking=False, event=None):
+cherrypy_start(blocking, event)
def stop(self):
cherrypy_stop()
# Multiple processes and threads should be OK here, but we'll still
# follow the NilmDB approach of having just one globally initialized
# copy of the server object.
_wsgi_server = None
-def wsgi_application(basepath): # pragma: no cover
+def wsgi_application(basepath):  # pragma: no cover
"""Return a WSGI application object.
'basepath' is the URL path of the application base, which
@ -243,13 +247,11 @@ def wsgi_application(basepath): # pragma: no cover
# Try to start the server
try:
_wsgi_server = nilmrun.server.Server(
-embedded = True,
-basepath = basepath.rstrip('/'))
+basepath=basepath.rstrip('/'))
except Exception:
# Build an error message on failure
import pprint
-err = sprintf("Initializing nilmrun failed:\n\n",
-dbpath)
+err = "Initializing nilmrun failed:\n\n"
err += traceback.format_exc()
try:
import pwd
@ -264,8 +266,10 @@ def wsgi_application(basepath): # pragma: no cover
err += sprintf("\nEnvironment:\n%s\n", pprint.pformat(environ))
if _wsgi_server is None:
# Serve up the error with our own mini WSGI app.
-headers = [ ('Content-type', 'text/plain'),
-('Content-length', str(len(err))) ]
+headers = [
+('Content-type', 'text/plain'),
+('Content-length', str(len(err)))
+]
start_response("500 Internal Server Error", headers)
return [err]
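
As a quick reference for the endpoints changed above, here is a minimal client-side sketch (the URL is only an example; the HTTPClient usage mirrors the scripts and tests added below):

    from nilmdb.client.httpclient import HTTPClient

    # Example only: assumes a local server started with the defaults above.
    client = HTTPClient(baseurl="http://127.0.0.1:8080/", post_json=True)
    # /run/command takes "argv" as a list of strings
    pid = client.post("run/command", {"argv": ["echo", "hello"]})
    # /run/code takes "code" plus an optional "args" list (ends up in sys.argv[1:])
    pid2 = client.post("run/code", {"code": "print('hi')"})
    # /process/status reports "alive", "exitcode", and the accumulated "log"
    status = client.get("process/status", {"pid": pid})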

requirements.txt (new file, 2 lines)

@ -0,0 +1,2 @@
nilmdb>=2.0.3
psutil==5.7.2

scripts/kill.py (new executable file, 51 lines)

@ -0,0 +1,51 @@
#!/usr/bin/env python3
from nilmdb.client.httpclient import HTTPClient, ClientError, ServerError
from nilmdb.utils.printf import *
import nilmrun
import argparse
import os
import sys
def main():
"""Kill/remove a process from the NilmRun server"""
def_url = os.environ.get("NILMRUN_URL", "http://localhost/nilmrun/")
parser = argparse.ArgumentParser(
description = 'Kill/remove a process from the NilmRun server',
formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", action="version",
version=nilmrun.__version__)
group = parser.add_argument_group("Standard options")
group.add_argument('-u', '--url',
help = 'NilmRun server URL', default = def_url)
group.add_argument('-n', '--noverify', action="store_true",
help = 'Disable SSL certificate verification')
group = parser.add_argument_group("Program")
group.add_argument('-q', '--quiet', action="store_true",
help = "Don't print out the final log contents")
group.add_argument('pid', nargs='+', help="PIDs to kill")
args = parser.parse_args()
client = HTTPClient(baseurl = args.url, verify_ssl = not args.noverify)
# Kill or remove process
all_failed = True
for pid in args.pid:
try:
s = client.post("process/remove", { "pid": pid })
if not args.quiet:
sys.stdout.write(s['log'])
all_failed = False
except ClientError as e:
if "404" in e.status:
fprintf(sys.stderr, "no such pid: %s\n", pid)
else:
raise
# Return error if we failed to remove any of them
if all_failed:
raise SystemExit(1)
if __name__ == "__main__":
main()


@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
import nilmrun.server
import argparse
@ -11,9 +11,8 @@ def main():
parser = argparse.ArgumentParser(
description = 'Run the NilmRun server',
formatter_class = argparse.ArgumentDefaultsHelpFormatter)
-parser.add_argument("-V", "--version", action="version",
-version = nilmrun.__version__)
+parser.add_argument("-v", "--version", action="version",
+version=nilmrun.__version__)
group = parser.add_argument_group("Standard options")
group.add_argument('-a', '--address',
@ -36,25 +35,24 @@ def main():
-embedded = False
server = nilmrun.server.Server(host = args.address,
port = args.port,
-embedded = embedded,
force_traceback = args.traceback)
# Print info
if not args.quiet:
-print "NilmRun version: %s" % nilmrun.__version__
-print ("Note: This server does not do any authentication! " +
-"Anyone who can connect can run arbitrary commands.")
+print("NilmRun version: %s" % nilmrun.__version__)
+print(("Note: This server does not do any authentication! " +
+"Anyone who can connect can run arbitrary commands."))
if args.address == '0.0.0.0' or args.address == '::':
host = socket.getfqdn()
else:
host = args.address
-print "Server URL: http://%s:%d/" % ( host, args.port)
-print "----"
+print("Server URL: http://%s:%d/" % ( host, args.port))
+print("----")
server.start(blocking = True)
if not args.quiet:
-print "Shutting down"
+print("Shutting down")
if __name__ == "__main__":
main()
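
For context, the constructor keywords used above are the ones the updated Server accepts; a minimal standalone launch would look roughly like this (host and port are just example values, and argument parsing is omitted):

    import nilmrun.server

    # Sketch only: mirrors what the script above does with its defaults.
    server = nilmrun.server.Server(host="127.0.0.1", port=8080,
                                   force_traceback=False)
    server.start(blocking=True)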

scripts/ps.py (new executable file, 67 lines)

@ -0,0 +1,67 @@
#!/usr/bin/env python3
from nilmdb.client.httpclient import HTTPClient, ClientError, ServerError
from nilmdb.utils.printf import *
import datetime_tz
import nilmrun
import argparse
import os
def main():
"""List NilmRun processes"""
def_url = os.environ.get("NILMRUN_URL", "http://localhost/nilmrun/")
parser = argparse.ArgumentParser(
description = 'List NilmRun processes',
formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", action="version",
version=nilmrun.__version__)
group = parser.add_argument_group("Standard options")
group.add_argument('-u', '--url',
help = 'NilmRun server URL', default = def_url)
group.add_argument('-n', '--noverify', action="store_true",
help = 'Disable SSL certificate verification')
args = parser.parse_args()
client = HTTPClient(baseurl = args.url, verify_ssl = not args.noverify)
# Print overall system info
info = client.get("process/info")
total = info['total']
system = info['system']
printf(" procs: %d nilm, %d other\n", info['total']['procs'],
info['system']['procs'] - info['total']['procs'])
printf(" cpu: %d%% nilm, %d%% other, %d%% max\n",
round(info['total']['cpu_percent']),
round(info['system']['cpu_percent'] - info['total']['cpu_percent']),
round(info['system']['cpu_max']))
printf(" mem: %d MiB used, %d MiB total, %d%%\n",
round(info['system']['mem_used'] / 1048576.0),
round(info['system']['mem_total'] / 1048576.0),
round(info['system']['mem_used'] * 100.0
/ info['system']['mem_total']))
# Print process detail for each managed process
fmt = "%-36s %-6s %-15s %-4s %-3s %-5s\n"
printf(fmt, "PID", "STATE", "SINCE", "PROC", "CPU", "LOG")
if len(info['pids']) == 0:
printf("No running processes\n")
raise SystemExit(0)
for pid in sorted(info['pids'].keys()):
pidinfo = client.get("process/status", { "pid": pid })
if pidinfo['alive']:
status = "alive"
else:
if pidinfo['exitcode']:
status = "error"
else:
status = "done"
dt = datetime_tz.datetime_tz.fromtimestamp(pidinfo['start_time'])
since = dt.strftime("%m/%d-%H:%M:%S")
printf(fmt, pid, status, since, info['pids'][pid]['procs'],
str(int(round(info['pids'][pid]['cpu_percent']))),
len(pidinfo['log']))
if __name__ == "__main__":
main()
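
The fields this script reads imply roughly the following shape for the process/info response; the values below are purely illustrative, not from a real server:

    info = {
        "total":  {"procs": 2, "cpu_percent": 12.0},
        "system": {"procs": 180, "cpu_percent": 35.0, "cpu_max": 400.0,
                   "mem_used": 2 * 1024**3, "mem_total": 8 * 1024**3},
        "pids":   {"<pid>": {"procs": 1, "cpu_percent": 7.0}},
    }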

scripts/run.py (new executable file, 61 lines)

@ -0,0 +1,61 @@
#!/usr/bin/env python3
from nilmdb.client.httpclient import HTTPClient, ClientError, ServerError
from nilmdb.utils.printf import *
import nilmrun
import argparse
import os
import time
import sys
def main():
"""Run a command on the NilmRun server"""
def_url = os.environ.get("NILMRUN_URL", "http://localhost/nilmrun/")
parser = argparse.ArgumentParser(
description = 'Run a command on the NilmRun server',
formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", action="version",
version=nilmrun.__version__)
group = parser.add_argument_group("Standard options")
group.add_argument('-u', '--url',
help = 'NilmRun server URL', default = def_url)
group.add_argument('-n', '--noverify', action="store_true",
help = 'Disable SSL certificate verification')
group = parser.add_argument_group("Program")
group.add_argument('-d', '--detach', action="store_true",
help = 'Run process and return immediately without '
'printing its output')
group.add_argument('cmd', help="Command to run")
group.add_argument('arg', nargs=argparse.REMAINDER,
help="Arguments for command")
args = parser.parse_args()
client = HTTPClient(baseurl=args.url, verify_ssl=not args.noverify,
post_json=True)
# Run command
pid = client.post("run/command", { "argv": [ args.cmd ] + args.arg })
# If we're detaching, just print the PID
if args.detach:
print(pid)
raise SystemExit(0)
# Otherwise, watch the log output, and kill the process when it's done
# or when this script terminates.
try:
while True:
s = client.get("process/status", { "pid": pid, "clear": 1 })
sys.stdout.write(s['log'])
sys.stdout.flush()
if not s['alive']:
break
time.sleep(1)
finally:
s = client.post("process/remove", { "pid": pid })
raise SystemExit(s['exitcode'])
if __name__ == "__main__":
main()
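
Together with the entry_points change in setup.py further down, these three scripts presumably surface as the new nilmrun-ps, nilmrun-run, and nilmrun-kill console commands. Note that the finally clause above removes the process even if the log-watching loop is interrupted, so the remote exit code is always propagated via SystemExit.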


@ -20,3 +20,19 @@ cover-erase=1
stop=1
verbosity=2
tests=tests
[versioneer]
VCS=git
style=pep440
versionfile_source=nilmrun/_version.py
versionfile_build=nilmrun/_version.py
tag_prefix=nilmrun-
parentdir_prefix=nilmrun-
[flake8]
exclude=_version.py
extend-ignore=E731
[pylint]
ignore=_version.py
disable=C0103,C0111,R0913,R0914
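
The new [versioneer] block appears to take over the versionfile_source/versionfile_build/tag_prefix/parentdir_prefix settings that were previously assigned as versioneer.* attributes in setup.py (see that diff below), matching the updated versioneer's setup.cfg-based configuration.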


@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
# To release a new version, tag it:
# git tag -a nilmrun-1.1 -m "Version 1.1"
@ -6,66 +6,31 @@
# Then just package it up:
# python setup.py sdist
-# This is supposed to be using Distribute:
-#
-# distutils provides a "setup" method.
-# setuptools is a set of monkeypatches on top of that.
-# distribute is a particular version/implementation of setuptools.
-#
-# So we don't really know if this is using the old setuptools or the
-# Distribute-provided version of setuptools.
-import traceback
import sys
import os
+from setuptools import setup
-try:
-from setuptools import setup, find_packages
-import distutils.version
-except ImportError:
-traceback.print_exc()
-print "Please install the prerequisites listed in README.txt"
-sys.exit(1)
# Versioneer manages version numbers from git tags.
# https://github.com/warner/python-versioneer
import versioneer
-versioneer.versionfile_source = 'nilmrun/_version.py'
-versioneer.versionfile_build = 'nilmrun/_version.py'
-versioneer.tag_prefix = 'nilmrun-'
-versioneer.parentdir_prefix = 'nilmrun-'
-# Hack to workaround logging/multiprocessing issue:
-# https://groups.google.com/d/msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
-try: import multiprocessing
-except: pass
+# Get list of requirements to use in `install_requires` below. Note
+# that we don't make a distinction between things that are actually
+# required for end-users vs developers (or use `test_requires` or
+# anything else) -- just install everything for simplicity.
+install_requires = open('requirements.txt').readlines()
-# We need a MANIFEST.in. Generate it here rather than polluting the
-# repository with yet another setup-related file.
-with open("MANIFEST.in", "w") as m:
-m.write("""
-# Root
-include README.txt
-include setup.py
-include versioneer.py
-include Makefile
-""")
# Run setup
setup(name='nilmrun',
version = versioneer.get_version(),
cmdclass = versioneer.get_cmdclass(),
-url = 'https://git.jim.sh/jim/lees/nilmrun.git',
+url = 'https://git.jim.sh/nilm/nilmrun.git',
author = 'Jim Paris',
description = "NILM Database Filter Runner",
long_description = "NILM Database Filter Runner",
license = "Proprietary",
author_email = 'jim@jtan.com',
-install_requires = [ 'nilmdb >= 1.8.0',
-'nilmtools >= 1.2.2',
-'numpy',
-'scipy',
-],
+install_requires = install_requires,
packages = [ 'nilmrun',
'nilmrun.scripts',
],
@ -75,7 +40,9 @@ setup(name='nilmrun',
entry_points = {
'console_scripts': [
'nilmrun-server = nilmrun.scripts.nilmrun_server:main',
-'nilm-trainola = nilmrun.trainola:main',
+'nilmrun-ps = nilmrun.scripts.ps:main',
+'nilmrun-run = nilmrun.scripts.run:main',
+'nilmrun-kill = nilmrun.scripts.kill:main',
],
},
zip_safe = False,
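
Given the requirements.txt added earlier, install_requires ends up as ['nilmdb>=2.0.3\n', 'psutil==5.7.2\n']; the trailing newlines from readlines() are accepted when setuptools parses the requirement strings.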


@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
import nose
import os


@ -1,3 +1,3 @@
-test_client.py
+test_nilmrun.py
test_*.py


@ -1,323 +0,0 @@
# -*- coding: utf-8 -*-
import nilmrun.server
from nilmdb.client.httpclient import HTTPClient, ClientError, ServerError
from nilmdb.utils.printf import *
from nose.plugins.skip import SkipTest
from nose.tools import *
from nose.tools import assert_raises
import itertools
import distutils.version
import os
import sys
import threading
import cStringIO
import simplejson as json
import unittest
import warnings
import time
import re
import urllib2
from urllib2 import urlopen, HTTPError
import requests
import pprint
import textwrap
from testutil.helpers import *
testurl = "http://localhost:32181/"
def setup_module():
global test_server
# Start web app on a custom port
test_server = nilmrun.server.Server(host = "127.0.0.1",
port = 32181,
force_traceback = True)
test_server.start(blocking = False)
def teardown_module():
global test_server
# Close web app
test_server.stop()
class TestClient(object):
def wait_kill(self, client, pid, timeout = 1):
time.sleep(timeout)
status = client.get("/process/status", { "pid": pid })
if not status["alive"]:
raise AssertionError("died before we could kill it")
status = client.post("/process/remove", { "pid": pid })
if status["alive"]:
raise AssertionError("didn't get killed")
return status
def wait_end(self, client, pid, timeout = 5, remove = True):
start = time.time()
status = None
while (time.time() - start) < timeout:
status = client.get("/process/status", { "pid": pid })
if status["alive"] == False:
break
else:
raise AssertionError("process " + str(pid) + " didn't die in " +
str(timeout) + " seconds: " + repr(status))
if remove:
status = client.post("/process/remove", { "pid": pid })
return status
def test_client_01_basic(self):
client = HTTPClient(baseurl = testurl)
version = client.get("/version")
eq_(distutils.version.LooseVersion(version),
distutils.version.LooseVersion(nilmrun.__version__))
in_("This is NilmRun", client.get("/"))
with assert_raises(ClientError):
client.get("/favicon.ico")
def test_client_02_manager(self):
client = HTTPClient(baseurl = testurl)
eq_(client.get("/process/list"), [])
with assert_raises(ClientError) as e:
client.get("/process/status", { "pid": 12345 })
in_("No such PID", str(e.exception))
with assert_raises(ClientError):
client.get("/process/remove", { "pid": 12345 })
in_("No such PID", str(e.exception))
def test_client_03_process_basic(self):
client = HTTPClient(baseurl = testurl, post_json = True)
# start dummy filter
pid = client.post("/run/dummy", { "data": 30 })
eq_(client.get("/process/list"), [pid])
time.sleep(1)
# Verify that status looks OK
status = client.get("/process/status", { "pid": pid, "clear": True })
for x in [ "pid", "alive", "exitcode", "name",
"start_time", "parameters", "log" ]:
in_(x, status)
in_("dummy 0\ndummy 1\ndummy 2\ndummy 3\n", status["log"])
eq_(status["alive"], True)
eq_(status["exitcode"], None)
# Check that the log got cleared
status = client.get("/process/status", { "pid": pid })
nin_("dummy 0\ndummy 1\ndummy 2\ndummy 3\n", status["log"])
# See that it ended properly
status = self.wait_end(client, pid, remove = False)
in_("dummy 27\ndummy 28\ndummy 29\n", status["log"])
eq_(status["exitcode"], 0)
# Remove it
killstatus = client.post("/process/remove", { "pid": pid })
eq_(status, killstatus)
eq_(client.get("/process/list"), [])
with assert_raises(ClientError) as e:
client.post("/process/remove", { "pid": pid })
in_("No such PID", str(e.exception))
def test_client_04_process_terminate(self):
client = HTTPClient(baseurl = testurl, post_json = True)
# Trigger exception in filter
pid = client.post("/run/dummy", { "data": -1 })
time.sleep(0.5)
status = client.get("/process/status", { "pid": pid })
eq_(status["alive"], False)
eq_(status["exitcode"], 1)
in_("Exception: test exception", status["log"])
client.post("/process/remove", { "pid": pid })
# Kill a running filter by removing it early
newpid = client.post("/run/dummy", { "data": 50 })
ne_(newpid, pid)
time.sleep(0.5)
start = time.time()
status = client.post("/process/remove", { "pid": newpid })
elapsed = time.time() - start
# Should have died in slightly over 1 second
assert(0.5 < elapsed < 2)
eq_(status["alive"], False)
ne_(status["exitcode"], 0)
# No more
eq_(client.get("/process/list"), [])
# Try to remove a running filter that ignored SIGTERM
pid = client.post("/run/dummy", { "data": 0 })
start = time.time()
status = client.post("/process/remove", { "pid": pid })
elapsed = time.time() - start
# Should have died in slightly over 2 seconds
assert(1.5 < elapsed < 3)
eq_(status["alive"], False)
ne_(status["exitcode"], 0)
def test_client_05_trainola_simple(self):
client = HTTPClient(baseurl = testurl, post_json = True)
pid = client.post("/run/trainola", { "data": {} })
status = self.wait_end(client, pid, remove = False)
ne_(status["exitcode"], 0)
status = client.post("/process/remove", { "pid": pid })
@unittest.skip("needs a running nilmdb")
def test_client_06_trainola(self):
client = HTTPClient(baseurl = testurl, post_json = True)
data = { "url": "http://bucket.mit.edu/nilmdb",
"stream": "/sharon/prep-a",
"start": 1366111383280463,
"end": 1366126163457797,
"columns": [ { "name": "P1", "index": 0 },
{ "name": "Q1", "index": 1 },
{ "name": "P3", "index": 2 } ],
"exemplars": [
{ "name": "Boiler Pump ON",
"url": "http://bucket.mit.edu/nilmdb",
"stream": "/sharon/prep-a",
"start": 1366260494269078,
"end": 1366260608185031,
"columns": [ { "name": "P1", "index": 0 },
{ "name": "Q1", "index": 1 }
]
},
{ "name": "Boiler Pump OFF",
"url": "http://bucket.mit.edu/nilmdb",
"stream": "/sharon/prep-a",
"start": 1366260864215764,
"end": 1366260870882998,
"columns": [ { "name": "P1", "index": 0 },
{ "name": "Q1", "index": 1 }
]
}
]
}
# start trainola
pid = client.post("/run/trainola", { "data": data })
# wait for it to finish
for i in range(60):
time.sleep(1)
if i == 2:
status = client.get("/process/status", { "pid": pid,
"clear": True })
in_("Loading stream data", status['log'])
elif i == 3:
status = client.get("/process/status", { "pid": pid })
nin_("Loading stream data", status['log'])
else:
status = client.get("/process/status", { "pid": pid })
if status["alive"] == False:
break
else:
client.post("/process/remove", {"pid": pid })
raise AssertionError("took too long")
if i < 3:
raise AssertionError("too fast?")
def test_client_07_run_command(self):
client = HTTPClient(baseurl = testurl, post_json = True)
eq_(client.get("/process/list"), [])
def do(argv, kill):
pid = client.post("/run/command", { "argv": argv } )
eq_(client.get("/process/list"), [pid])
if kill:
return self.wait_kill(client, pid)
return self.wait_end(client, pid)
# Simple command
status = do(["pwd"], False)
eq_(status["exitcode"], 0)
eq_("/tmp\n", status["log"])
# Command with args
status = do(["expr", "1", "+", "2"], False)
eq_(status["exitcode"], 0)
eq_("3\n", status["log"])
# Missing command
status = do(["/no-such-command-blah-blah"], False)
ne_(status["exitcode"], 0)
# Kill a slow command
status = do(["sleep", "60"], True)
ne_(status["exitcode"], 0)
def test_client_08_run_code(self):
client = HTTPClient(baseurl = testurl, post_json = True)
eq_(client.get("/process/list"), [])
def do(code, args, kill):
pid = client.post("/run/code", { "code": code, "args": args } )
eq_(client.get("/process/list"), [pid])
if kill:
return self.wait_kill(client, pid)
return self.wait_end(client, pid)
# basic code snippet
code=textwrap.dedent("""
print 'hello'
def foo(arg):
print 'world'
""")
status = do(code, [], False)
eq_("hello\n", status["log"])
eq_(status["exitcode"], 0)
# compile error
code=textwrap.dedent("""
def foo(arg:
print 'hello'
""")
status = do(code, [], False)
in_("SyntaxError", status["log"])
eq_(status["exitcode"], 1)
# traceback in user code should be formatted nicely
code=textwrap.dedent("""
def foo(arg):
raise Exception(arg)
foo(123)
""")
status = do(code, [], False)
eq_('Traceback (most recent call last):\n' +
' File "<user-code>", line 4, in <module>\n' +
' foo(123)\n' +
' File "<user-code>", line 3, in foo\n' +
' raise Exception(arg)\n' +
'Exception: 123\n', status["log"])
eq_(status["exitcode"], 1)
# argument handling (strings come in as unicode)
code=textwrap.dedent("""
import sys
print sys.argv[1].encode('ascii'), sys.argv[2]
""")
status = do(code, ["hello", 123], False)
eq_(status["log"], "hello 123\n")
eq_(status["exitcode"], 0)
# try killing a long-running process
code=textwrap.dedent("""
import time
print 'hello'
time.sleep(60)
print 'world'
""")
status = do(code, [], True)
eq_(status["log"], "hello\n")
ne_(status["exitcode"], 0)

tests/test_nilmrun.py (new file, 434 lines)

@ -0,0 +1,434 @@
# -*- coding: utf-8 -*-
import nilmrun.server
from nilmdb.client.httpclient import HTTPClient, ClientError, ServerError
from nilmdb.utils.printf import *
from nose.plugins.skip import SkipTest
from nose.tools import *
from nose.tools import assert_raises
import itertools
import distutils.version
import os
import sys
import threading
import io
import json
import unittest
import warnings
import time
import re
import urllib.request, urllib.error, urllib.parse
from urllib.request import urlopen
from urllib.error import HTTPError
import requests
import pprint
import textwrap
from testutil.helpers import *
testurl = "http://localhost:32181/"
#testurl = "http://bucket.mit.edu/nilmrun/"
def setup_module():
global test_server
# Start web app on a custom port
test_server = nilmrun.server.Server(host = "127.0.0.1",
port = 32181,
force_traceback = True)
test_server.start(blocking = False)
def teardown_module():
global test_server
# Close web app
test_server.stop()
class TestClient(object):
def wait_kill(self, client, pid, timeout = 1):
time.sleep(timeout)
status = client.get("process/status", { "pid": pid })
if not status["alive"]:
raise AssertionError("died before we could kill it")
status = client.post("process/remove", { "pid": pid })
if status["alive"]:
raise AssertionError("didn't get killed")
return status
def wait_end(self, client, pid, timeout = 5, remove = True):
start = time.time()
status = None
while (time.time() - start) < timeout:
status = client.get("process/status", { "pid": pid })
if status["alive"] == False:
break
time.sleep(0.1)
else:
raise AssertionError("process " + str(pid) + " didn't die in " +
str(timeout) + " seconds: " + repr(status))
if remove:
status = client.post("process/remove", { "pid": pid })
return status
def test_client_01_basic(self):
client = HTTPClient(baseurl = testurl)
version = client.get("version")
eq_(distutils.version.LooseVersion(version),
distutils.version.LooseVersion(nilmrun.__version__))
in_("This is NilmRun", client.get(""))
with assert_raises(ClientError):
client.get("favicon.ico")
def test_client_02_manager(self):
client = HTTPClient(baseurl = testurl)
eq_(client.get("process/list"), [])
with assert_raises(ClientError) as e:
client.get("process/status", { "pid": 12345 })
in_("No such PID", str(e.exception))
with assert_raises(ClientError):
client.get("process/remove", { "pid": 12345 })
in_("No such PID", str(e.exception))
def test_client_03_run_command(self):
client = HTTPClient(baseurl = testurl, post_json = True)
eq_(client.get("process/list"), [])
def do(argv, kill):
pid = client.post("run/command", { "argv": argv } )
eq_(client.get("process/list"), [pid])
if kill:
return self.wait_kill(client, pid)
return self.wait_end(client, pid)
# Simple command
status = do(["pwd"], False)
eq_(status["exitcode"], 0)
eq_("/tmp\n", status["log"])
# Command with args
status = do(["expr", "1", "+", "2"], False)
eq_(status["exitcode"], 0)
eq_("3\n", status["log"])
# Missing command
with assert_raises(ClientError) as e:
do(["/no-such-command-blah-blah"], False)
in_("No such file or directory", str(e.exception))
# Kill a slow command
status = do(["sleep", "60"], True)
ne_(status["exitcode"], 0)
def _run_testfilter(self, client, args):
code = textwrap.dedent("""
from nilmdb.utils.printf import *
import time
import signal
import json
import sys
# This is just for testing the process management.
def test(n):
n = int(n)
if n < 0: # raise an exception
raise Exception("test exception")
if n == 0: # ignore SIGTERM and count to 100
n = 100
signal.signal(signal.SIGTERM, signal.SIG_IGN)
for x in range(n):
s = sprintf("dummy %d\\n", x)
if x & 1:
sys.stdout.write(s)
else:
sys.stderr.write(s)
time.sleep(0.1)
test(json.loads(sys.argv[1]))
""")
jsonargs = json.dumps(args)
return client.post("run/code", { "code": code, "args": [ jsonargs ] })
def test_client_04_process_basic(self):
client = HTTPClient(baseurl = testurl, post_json = True)
# start dummy filter
pid = self._run_testfilter(client, 30)
eq_(client.get("process/list"), [pid])
time.sleep(1)
# Verify that status looks OK
status = client.get("process/status", { "pid": pid, "clear": True })
for x in [ "pid", "alive", "exitcode", "start_time", "log" ]:
in_(x, status)
in_("dummy 0\ndummy 1\ndummy 2\ndummy 3\n", status["log"])
eq_(status["alive"], True)
eq_(status["exitcode"], None)
# Check that the log got cleared
status = client.get("process/status", { "pid": pid })
nin_("dummy 0\ndummy 1\ndummy 2\ndummy 3\n", status["log"])
# See that it ended properly
status = self.wait_end(client, pid, remove = False)
in_("dummy 27\ndummy 28\ndummy 29\n", status["log"])
eq_(status["exitcode"], 0)
# Remove it
killstatus = client.post("process/remove", { "pid": pid })
eq_(status, killstatus)
eq_(client.get("process/list"), [])
with assert_raises(ClientError) as e:
client.post("process/remove", { "pid": pid })
in_("No such PID", str(e.exception))
def test_client_05_process_terminate(self):
client = HTTPClient(baseurl = testurl, post_json = True)
# Trigger exception in filter
pid = self._run_testfilter(client, -1)
time.sleep(0.5)
status = client.get("process/status", { "pid": pid })
eq_(status["alive"], False)
eq_(status["exitcode"], 1)
in_("Exception: test exception", status["log"])
client.post("process/remove", { "pid": pid })
# Kill a running filter by removing it early
newpid = self._run_testfilter(client, 50)
ne_(newpid, pid)
time.sleep(0.5)
start = time.time()
status = client.post("process/remove", { "pid": newpid })
elapsed = time.time() - start
# Should have died in slightly over 1 second
assert(0.5 < elapsed < 2)
eq_(status["alive"], False)
ne_(status["exitcode"], 0)
# No more
eq_(client.get("process/list"), [])
# Try to remove a running filter that ignored SIGTERM
pid = self._run_testfilter(client, 0)
start = time.time()
status = client.post("process/remove", { "pid": pid })
elapsed = time.time() - start
# Should have died in slightly over 2 seconds
assert(1.5 < elapsed < 3)
eq_(status["alive"], False)
ne_(status["exitcode"], 0)
@unittest.skip("needs a running nilmdb; trainola moved to nilmtools")
def test_client_06_trainola(self):
client = HTTPClient(baseurl = testurl, post_json = True)
data = { "url": "http://bucket.mit.edu/nilmdb",
"dest_stream": "/sharon/prep-a-matches",
"stream": "/sharon/prep-a",
"start": 1366111383280463,
"end": 1366126163457797,
"columns": [ { "name": "P1", "index": 0 },
{ "name": "Q1", "index": 1 },
{ "name": "P3", "index": 2 } ],
"exemplars": [
{ "name": "Boiler Pump ON",
"url": "http://bucket.mit.edu/nilmdb",
"stream": "/sharon/prep-a",
"start": 1366260494269078,
"end": 1366260608185031,
"dest_column": 0,
"columns": [ { "name": "P1", "index": 0 },
{ "name": "Q1", "index": 1 }
]
},
{ "name": "Boiler Pump OFF",
"url": "http://bucket.mit.edu/nilmdb",
"stream": "/sharon/prep-a",
"start": 1366260864215764,
"end": 1366260870882998,
"dest_column": 1,
"columns": [ { "name": "P1", "index": 0 },
{ "name": "Q1", "index": 1 }
]
}
]
}
pid = client.post("run/code", { "code": "import nilmtools.trainola\n" +
"nilmtools.trainola.main()",
"args": [ data ] })
while True:
status = client.get("process/status", { "pid": pid, "clear": 1 })
sys.stdout.write(status["log"])
sys.stdout.flush()
if status["alive"] == False:
break
time.sleep(0.1)
status = client.post("process/remove", { "pid": pid })
os._exit(int(status["exitcode"]))
def test_client_07_run_code(self):
client = HTTPClient(baseurl = testurl, post_json = True)
eq_(client.get("process/list"), [])
def do(code, args, kill):
if args is not None:
pid = client.post("run/code", { "code": code, "args": args } )
else:
pid = client.post("run/code", { "code": code } )
eq_(client.get("process/list"), [pid])
if kill:
return self.wait_kill(client, pid)
return self.wait_end(client, pid)
# basic code snippet
code = textwrap.dedent("""
print('hello')
def foo(arg):
print('world')
""")
status = do(code, [], False)
eq_("hello\n", status["log"])
eq_(status["exitcode"], 0)
# compile error
code = textwrap.dedent("""
def foo(arg:
print('hello')
""")
status = do(code, [], False)
in_("SyntaxError", status["log"])
eq_(status["exitcode"], 1)
# traceback in user code should be formatted nicely
code = textwrap.dedent("""
def foo(arg):
raise Exception(arg)
foo(123)
""")
status = do(code, [], False)
cleaned_log = re.sub('File "[^"]*",', 'File "",', status["log"])
eq_('Traceback (most recent call last):\n' +
' File "", line 4, in <module>\n' +
' foo(123)\n' +
' File "", line 3, in foo\n' +
' raise Exception(arg)\n' +
'Exception: 123\n', cleaned_log)
eq_(status["exitcode"], 1)
# argument handling (strings come in as unicode)
code = textwrap.dedent("""
import sys
print(sys.argv[1], sys.argv[2])
sys.exit(0) # also test raising SystemExit
""")
with assert_raises(ClientError) as e:
do(code, ["hello", 123], False)
in_("400 Bad Request", str(e.exception))
status = do(code, ["hello", "123"], False)
eq_(status["log"], "hello 123\n")
eq_(status["exitcode"], 0)
# try killing a long-running process
code = textwrap.dedent("""
import time
print('hello')
time.sleep(60)
print('world')
""")
status = do(code, [], True)
eq_(status["log"], "hello\n")
ne_(status["exitcode"], 0)
# default arguments are empty
code = textwrap.dedent("""
import sys
print('args:', len(sys.argv[1:]))
""")
status = do(code, None, False)
eq_(status["log"], "args: 0\n")
eq_(status["exitcode"], 0)
def test_client_08_bad_types(self):
client = HTTPClient(baseurl = testurl, post_json = True)
with assert_raises(ClientError) as e:
client.post("run/code", { "code": "asdf", "args": "qwer" })
in_("must be a list", str(e.exception))
with assert_raises(ClientError) as e:
client.post("run/command", { "argv": "asdf" })
in_("must be a list", str(e.exception))
def test_client_09_info(self):
client = HTTPClient(baseurl = testurl, post_json = True)
# start some processes
a = client.post("run/command", { "argv": ["sleep","60"] } )
b = client.post("run/command", { "argv": ["sh","-c","sleep 2;true"] } )
c = client.post("run/command", { "argv": [
"sh","-c","dd if=/dev/zero of=/dev/null;true"] } )
d = client.post("run/command", { "argv": [
"dd", "if=/dev/zero", "of=/dev/null" ] } )
info = client.get("process/info")
eq_(info["pids"][a]["procs"], 1)
eq_(info["pids"][b]["procs"], 2)
eq_(info["pids"][c]["procs"], 2)
eq_(info["pids"][d]["procs"], 1)
eq_(info["total"]["procs"], 6)
lt_(info["pids"][a]["cpu_percent"], 50)
lt_(20, info["pids"][c]["cpu_percent"])
lt_(80, info["system"]["cpu_percent"])
for x in range(10):
time.sleep(1)
info = client.get("process/info")
if info["pids"][b]["procs"] != 2:
break
else:
raise Exception("process B didn't die: " + str(info["pids"][b]))
# kill all processes
for pid in client.get("process/list"):
client.post("process/remove", { "pid": pid })
def test_client_10_unicode(self):
client = HTTPClient(baseurl = testurl, post_json = True)
eq_(client.get("process/list"), [])
def verify(cmd, result):
pid = client.post("run/command", { "argv": [
"/bin/bash", "-c", cmd ] })
eq_(client.get("process/list"), [pid])
status = self.wait_end(client, pid)
eq_(result, status["log"])
# Unicode should work
verify("echo -n hello", "hello")
verify("echo -n ☠", "")
verify("echo -ne \\\\xe2\\\\x98\\\\xa0", "")
# Programs that spit out invalid UTF-8 should get replacement
# markers
verify("echo -ne \\\\xae", "\ufffd")
def test_client_11_atexit(self):
# Leave a directory and running process behind, for the atexit
# handler to clean up. Here we trigger the atexit manually,
# since it's hard to trigger it as part of the test suite.
client = HTTPClient(baseurl = testurl, post_json = True)
code = textwrap.dedent("""
import time
time.sleep(10)
""")
client.post("run/code", { "code": code, "args": [ "hello"] })
# Trigger atexit function
test_server._manager._atexit()
# Ensure no processes exit
eq_(client.get("process/list"), [])


@ -1,7 +1,7 @@
# Just some helpers for test functions
def myrepr(x):
-if isinstance(x, basestring):
+if isinstance(x, str):
return '"' + x + '"'
else:
return repr(x)

File diff suppressed because it is too large