Compare commits
130 Commits
517b237636
07f138e0f4
7538c6201b
4d9a106ca1
e90a79ddad
7056c5b4ec
df4e7f0967
b6bba16505
d4003d0d34
759492298a
b5f6fcc253
905e325ded
648b6f4b70
7f8a2c7027
276fbc652a
10b34f5937
83daeb148a
d65f00e8b2
71dc01c9a7
bcd21b3498
a1dee0e6f2
99ac47cf0d
4cdaef51c1
88466dcafe
8dfb8da15c
6cc1f6b7b2
8dc36c2d37
3738430103
a41111b045
85f822e1c4
0222dfebf0
70914690c1
10400f2b07
56153ff7ad
671f87b047
2f2faeeab7
2ed544bd30
6821b2a97b
b20bb92988
699de7b11f
ea67e45be9
ca440a42bd
4ff4b263b4
79e544c733
9acf99ff25
4958a5ab2e
f2d89e2da5
1952f245c0
7cbc0c11c3
9f2651c35e
9126980ed4
ea051c85b3
d8294469cf
96eadb0577
fb524c649f
19a34a07a4
d8df6f515f
90ee127c87
0b631b7dea
f587518adb
efbb2665fe
544413018c
322b0ec423
f3833d9b20
735c8497af
7252e40c2d
caa5604d81
6624e8dab6
d907638858
39e66fe38c
ba915bb290
3f0b8e50a2
f93edc469c
087fb39475
8b4acf41d6
32a76ccf3f
5f9367bdd3
5848d03507
36dc448f02
2764283f59
2d0c3f7868
cadba9fbba
2d200a86c9
640c1bc95e
b574fc86f4
02ee18c410
d1e241a213
c5c7f638e7
a1218fd20b
c58a933d21
7874e1ebfa
79b410a85b
6645395924
beb3eadd38
edf4568e8f
a962258b2a
fa011559c1
349eec3942
99500f3a88
54eccb17aa
cc8ac74a37
3be904d158
5d9fc5500c
57751f5b32
1c005518d8
3279f7ef2c
a2e124f444
6d673bd2be
613a3185e3
c83ee65cf7
113633459d
41abf53085
fef3e1d31e
02db87eee6
ad85c3dd29
0e6ccd687b
85d4c419fd
159278066c
b69358a185
e82ef60e2e
911d9bc284
752a9b36ae
97d17de8ad
5da7e6558e
1928caa1d7
5db034432c
55119a3e07
a9eff10dbf
0f5c1c0db6
d17365ca37
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,10 +1,11 @@
 # -*- conf -*-
 
 [run]
-# branch = True
+branch = True
 
 [report]
 exclude_lines =
     pragma: no cover
    if 0:
-omit = nilmdb/utils/datetime_tz*,nilmdb/scripts,nilmdb/_version.py,nilmdb/fsck
+omit = nilmdb/scripts,nilmdb/_version.py,nilmdb/fsck
+show_missing = True
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,7 @@ tests/*testdb/
 db/
 
 # Compiled / cythonized files
+README.html
 docs/*.html
 build/
 *.pyc
@@ -15,10 +16,8 @@ nilmdb/server/rbtree.c
 # Setup junk
 dist/
 nilmdb.egg-info/
-# This gets generated as needed by setup.py
-MANIFEST.in
-MANIFEST
+venv/
+.eggs/
 
 # Misc
 timeit*out
--- a/.pylintrc
+++ /dev/null
@@ -1,250 +0,0 @@
-# -*- conf -*-
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=datetime_tz
-
-# Pickle collected data for later comparisons.
-persistent=no
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time.
-#enable=
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once).
-disable=C0111,R0903,R0201,R0914,R0912,W0142,W0703,W0702
-
-
-[REPORTS]
-
-# Set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html
-output-format=parseable
-
-# Include message's id in output
-include-ids=yes
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=yes
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables errors warning, statement which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (RP0004).
-comment=no
-
-
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamically set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, add a predefined set of Zope acquired attributes
-# to generated-members.
-zope=no
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E0201 when accessed. Python regular
-# expressions are accepted.
-generated-members=REQUEST,acl_users,aq_parent
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=80
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching the beginning of the name of dummy variables
-# (i.e. not used).
-dummy-variables-rgx=_|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=apply,input
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__)|version)$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# Regular expression which should only match functions or classes name which do
-# not require a docstring
-no-docstring-rgx=__.*__
-
-
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore
-ignored-argument-names=_.*
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of branch for function / method body
-max-branchs=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-int-import-graph=
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-overgeneral-exceptions=Exception
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,29 @@
+# Root
+include README.txt
+include setup.cfg
+include setup.py
+include versioneer.py
+include Makefile
+include .coveragerc
+include .pylintrc
+include requirements.txt
+
+# Cython files -- include .pyx source, but not the generated .c files
+# (Downstream systems must have cython installed in order to build)
+recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
+exclude nilmdb/server/interval.c
+exclude nilmdb/server/rbtree.c
+
+# Version
+include nilmdb/_version.py
+
+# Tests
+recursive-include tests *.py
+recursive-include tests/data *
+include tests/test.order
+
+# Docs
+recursive-include docs Makefile *.md
+
+# Extras
+recursive-include extras *
--- a/Makefile
+++ b/Makefile
@@ -2,45 +2,49 @@
 all: test
 
 version:
-	python setup.py version
+	python3 setup.py version
 
 build:
-	python setup.py build_ext --inplace
+	python3 setup.py build_ext --inplace
 
 dist: sdist
 sdist:
-	python setup.py sdist
+	python3 setup.py sdist
 
 install:
-	python setup.py install
+	python3 setup.py install
 
 develop:
-	python setup.py develop
+	python3 setup.py develop
 
 docs:
 	make -C docs
 
+ctrl: flake
+flake:
+	flake8 nilmdb
 lint:
-	pylint --rcfile=.pylintrc nilmdb
+	pylint3 --rcfile=setup.cfg nilmdb
 
 test:
-ifeq ($(INSIDE_EMACS), t)
+ifneq ($(INSIDE_EMACS),)
 	# Use the slightly more flexible script
-	python setup.py build_ext --inplace
-	python tests/runtests.py
+	python3 setup.py build_ext --inplace
+	python3 tests/runtests.py
 else
 	# Let setup.py check dependencies, build stuff, and run the test
-	python setup.py nosetests
+	python3 setup.py nosetests
 endif
 
 clean::
-	find . -name '*pyc' | xargs rm -f
+	find . -name '*.pyc' -o -name '__pycache__' -print0 | xargs -0 rm -rf
 	rm -f .coverage
 	rm -rf tests/*testdb*
-	rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so MANIFEST.in
+	rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so
 	make -C docs clean
 
 gitclean::
 	git clean -dXf
 
-.PHONY: all version build dist sdist install docs lint test clean gitclean
+.PHONY: all version build dist sdist install docs test
+.PHONY: ctrl lint flake clean gitclean
--- /dev/null
+++ b/README.md
@@ -0,0 +1,40 @@
+# nilmdb: Non-Intrusive Load Monitor Database
+by Jim Paris <jim@jtan.com>
+
+NilmDB requires Python 3.8 or newer.
+
+## Prerequisites:
+
+    # Runtime and build environments
+    sudo apt install python3 python3-dev python3-venv python3-pip
+
+    # Create a new Python virtual environment to isolate deps.
+    python3 -m venv ../venv
+    source ../venv/bin/activate   # run "deactivate" to leave
+
+    # Install all Python dependencies
+    pip3 install -r requirements.txt
+
+## Test:
+
+    python3 setup.py nosetests
+
+## Install:
+
+Install it into the virtual environment
+
+    python3 setup.py install
+
+If you want to instead install it system-wide, you will also need to
+install the requirements system-wide:
+
+    sudo pip3 install -r requirements.txt
+    sudo python3 setup.py install
+
+## Usage:
+
+    nilmdb-server --help
+    nilmdb-fsck --help
+    nilmtool --help
+
+See docs/wsgi.md for info on setting up a WSGI application in Apache.
--- a/README.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-nilmdb: Non-Intrusive Load Monitor Database
-by Jim Paris <jim@jtan.com>
-
-Prerequisites:
-
-  # Runtime and build environments
-  sudo apt-get install python2.7 python2.7-dev python-setuptools cython
-
-  # Base NilmDB dependencies
-  sudo apt-get install python-cherrypy3 python-decorator python-simplejson
-  sudo apt-get install python-requests python-dateutil python-tz
-  sudo apt-get install python-progressbar python-psutil
-
-  # Other dependencies (required by some modules)
-  sudo apt-get install python-numpy
-
-  # Tools for running tests
-  sudo apt-get install python-nose python-coverage
-
-Test:
-  python setup.py nosetests
-
-Install:
-
-  python setup.py install
-
-Usage:
-
-  nilmdb-server --help
-  nilmdb-fsck --help
-  nilmtool --help
-
-See docs/wsgi.md for info on setting up a WSGI application in Apache.
@@ -430,7 +430,7 @@ mod_wsgi requires "WSGIChunkedRequest On" to handle
 "Transfer-encoding: Chunked" requests. However, `/stream/insert`
 doesn't handle this correctly right now, because:
 
-- The `cherrpy.request.body.read()` call needs to be fixed for chunked requests
+- The `cherrypy.request.body.read()` call needs to be fixed for chunked requests
 
 - We don't want to just buffer endlessly in the server, and it will
   require some thought on how to handle data in chunks (what to do about
@@ -438,3 +438,32 @@ doesn't handle this correctly right now, because:
 
 It is probably better to just keep the endpoint management on the client
 side, so leave "WSGIChunkedRequest off" for now.
+
+
+Unicode & character encoding
+----------------------------
+
+Stream data is passed back and forth as raw `bytes` objects in most
+places, including the `nilmdb.client` and command-line interfaces.
+This is done partially for performance reasons, and partially to
+support the binary insert/extract options, where character-set encoding
+would not apply.
+
+For the HTTP server, the raw bytes transferred over HTTP are interpreted
+as follows:
+
+- For `/stream/insert`, the client-provided `Content-Type` is ignored,
+  and the data is read as if it were `application/octet-stream`.
+- For `/stream/extract`, the returned data is `application/octet-stream`.
+- All other endpoints communicate via JSON, which is specified to always
+  be encoded as UTF-8. This includes:
+  - `/version`
+  - `/dbinfo`
+  - `/stream/list`
+  - `/stream/create`
+  - `/stream/destroy`
+  - `/stream/rename`
+  - `/stream/get_metadata`
+  - `/stream/set_metadata`
+  - `/stream/update_metadata`
+  - `/stream/remove`
+  - `/stream/intervals`
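A quick sketch of what this bytes-only convention means for client code may help here. This is illustrative only: `Client`, `stream_create`, `stream_insert` and `stream_extract` appear in this changeset, but the server URL, stream path and layout below are invented for the example.

    import nilmdb.client

    client = nilmdb.client.Client("http://localhost:12380/")

    # Metadata operations go through the JSON endpoints (always UTF-8).
    client.stream_create("/test/demo", "float32_2")

    # Stream data itself is raw bytes: ASCII lines with integer
    # microsecond timestamps, never str.
    data = b"1234567890000000 1.0 2.0\n1234567891000000 1.5 2.5\n"
    client.stream_insert("/test/demo", data)

    # Extraction likewise yields bytes chunks.
    for chunk in client.stream_extract("/test/demo"):
        assert isinstance(chunk, bytes)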
@@ -1,8 +1,8 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
 
 import os
 import sys
-import cPickle as pickle
+import pickle
 import argparse
 import fcntl
 import re
@@ -44,7 +44,7 @@ with open(lock, "w") as f:
         maxsize = fix[fixpath]
         if size > maxsize:
             diff = size - maxsize
-            print diff, "too big:", fn
+            print(diff, "too big:", fn)
             if args.yes:
                 with open(fn, "a+") as dbfile:
                     dbfile.truncate(maxsize)
--- a/nilmdb/__init__.py
+++ b/nilmdb/__init__.py
@@ -1,10 +1,5 @@
 """Main NilmDB import"""
 
-# These aren't imported automatically, because loading the server
-# stuff isn't always necessary.
-#from nilmdb.server import NilmDB, Server
-#from nilmdb.client import Client
-
-from nilmdb._version import get_versions
+from ._version import get_versions
 __version__ = get_versions()['version']
 del get_versions
--- a/nilmdb/_version.py
+++ b/nilmdb/_version.py
@@ -1,197 +1,520 @@
 
-IN_LONG_VERSION_PY = True
 # This file helps to compute a version number in source trees obtained from
 # git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (build by setup.py sdist) and build
+# feature). Distribution tarballs (built by setup.py sdist) and build
 # directories (produced by setup.py build) will contain a much shorter file
 # that just contains the computed version number.
 
 # This file is released into the public domain. Generated by
-# versioneer-0.7+ (https://github.com/warner/python-versioneer)
+# versioneer-0.18 (https://github.com/warner/python-versioneer)
 
-# these strings will be replaced by git during git-archive
-git_refnames = "$Format:%d$"
-git_full = "$Format:%H$"
+"""Git implementation of _version.py."""
 
+import errno
+import os
+import re
 import subprocess
 import sys
 
-def run_command(args, cwd=None, verbose=False):
-    try:
-        # remember shell=False, so use git.cmd on windows, not just git
-        p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
-    except EnvironmentError:
-        e = sys.exc_info()[1]
-        if verbose:
-            print("unable to run %s" % args[0])
-            print(e)
-        return None
-    stdout = p.communicate()[0].strip()
-    if sys.version >= '3':
-        stdout = stdout.decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %s (error)" % args[0])
-        return None
-    return stdout
-
-
-import sys
-import re
-import os.path
-
-def get_expanded_variables(versionfile_source):
-    # the code embedded in _version.py can just fetch the value of these
-    # variables. When used from setup.py, we don't want to import
-    # _version.py, so we do it with a regexp instead. This function is not
-    # used from _version.py.
-    variables = {}
-    try:
-        for line in open(versionfile_source,"r").readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    variables["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    variables["full"] = mo.group(1)
-    except EnvironmentError:
-        pass
-    return variables
-
-def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
-    refnames = variables["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("variables are unexpanded, not using")
-        return {}  # unexpanded, so not in an unpacked git-archive tarball
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    for ref in list(refs):
-        if not re.search(r'\d', ref):
-            if verbose:
-                print("discarding '%s', no digits" % ref)
-            refs.discard(ref)
-            # Assume all version tags have a digit. git's %d expansion
-            # behaves like git log --decorate=short and strips out the
-            # refs/heads/ and refs/tags/ prefixes that would let us
-            # distinguish between branches and tags. By ignoring refnames
-            # without digits, we filter out many common branch names like
-            # "release" and "stabilization", as well as "HEAD" and "master".
-    if verbose:
-        print("remaining refs: %s" % ",".join(sorted(refs)))
-    for ref in sorted(refs):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %s" % r)
-            return {"version": r,
-                    "full": variables["full"].strip()}
-    # no suitable tags, so we use the full revision id
-    if verbose:
-        print("no suitable tags, using full revision id")
-    return {"version": variables["full"].strip(),
-            "full": variables["full"].strip()}
-
-def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
-    # this runs 'git' from the root of the source tree. That either means
-    # someone ran a setup.py command (and this code is in versioneer.py, so
-    # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
-    # the source tree), or someone ran a project-specific entry point (and
-    # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
-    # containing directory is somewhere deeper in the source tree). This only
-    # gets called if the git-archive 'subst' variables were *not* expanded,
-    # and _version.py hasn't already been rewritten with a short version
-    # string, meaning we're inside a checked out source tree.
-
-    try:
-        here = os.path.abspath(__file__)
-    except NameError:
-        # some py2exe/bbfreeze/non-CPython implementations don't do __file__
-        return {}  # not always correct
-
-    # versionfile_source is the relative path from the top of the source tree
-    # (where the .git directory might live) to this file. Invert this to find
-    # the root from __file__.
-    root = here
-    if IN_LONG_VERSION_PY:
-        for i in range(len(versionfile_source.split("/"))):
-            root = os.path.dirname(root)
-    else:
-        root = os.path.dirname(here)
-    if not os.path.exists(os.path.join(root, ".git")):
-        if verbose:
-            print("no .git in %s" % root)
-        return {}
-
-    GIT = "git"
-    if sys.platform == "win32":
-        GIT = "git.cmd"
-    stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
-                         cwd=root)
-    if stdout is None:
-        return {}
-    if not stdout.startswith(tag_prefix):
-        if verbose:
-            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
-        return {}
-    tag = stdout[len(tag_prefix):]
-    stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
-    if stdout is None:
-        return {}
-    full = stdout.strip()
-    if tag.endswith("-dirty"):
-        full += "-dirty"
-    return {"version": tag, "full": full}
-
-
-def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
-    if IN_LONG_VERSION_PY:
-        # We're running from _version.py. If it's from a source tree
-        # (execute-in-place), we can work upwards to find the root of the
-        # tree, and then check the parent directory for a version string. If
-        # it's in an installed application, there's no hope.
-        try:
-            here = os.path.abspath(__file__)
-        except NameError:
-            # py2exe/bbfreeze/non-CPython don't have __file__
-            return {}  # without __file__, we have no hope
-        # versionfile_source is the relative path from the top of the source
-        # tree to _version.py. Invert this to find the root from __file__.
-        root = here
-        for i in range(len(versionfile_source.split("/"))):
-            root = os.path.dirname(root)
-    else:
-        # we're running from versioneer.py, which means we're running from
-        # the setup.py in a source tree. sys.argv[0] is setup.py in the root.
-        here = os.path.abspath(sys.argv[0])
-        root = os.path.dirname(here)
-
-    # Source tarballs conventionally unpack into a directory that includes
-    # both the project name and a version string.
-    dirname = os.path.basename(root)
-    if not dirname.startswith(parentdir_prefix):
-        if verbose:
-            print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
-                  (root, dirname, parentdir_prefix))
-        return None
-    return {"version": dirname[len(parentdir_prefix):], "full": ""}
-
-tag_prefix = "nilmdb-"
-parentdir_prefix = "nilmdb-"
-versionfile_source = "nilmdb/_version.py"
-
-def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
-    variables = {"refnames": git_refnames, "full": git_full}
-    ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
-    if not ver:
-        ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
-    if not ver:
-        ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
-                                      verbose)
-    if not ver:
-        ver = default
-    return ver
+
+def get_keywords():
+    """Get the keywords needed to look up the version information."""
+    # these strings will be replaced by git during git-archive.
+    # setup.py/versioneer.py will grep for the variable names, so they must
+    # each be defined on a line of their own. _version.py will just call
+    # get_keywords().
+    git_refnames = "$Format:%d$"
+    git_full = "$Format:%H$"
+    git_date = "$Format:%ci$"
+    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
+    return keywords
+
+
+class VersioneerConfig:
+    """Container for Versioneer configuration parameters."""
+
+
+def get_config():
+    """Create, populate and return the VersioneerConfig() object."""
+    # these strings are filled in when 'setup.py versioneer' creates
+    # _version.py
+    cfg = VersioneerConfig()
+    cfg.VCS = "git"
+    cfg.style = "pep440"
+    cfg.tag_prefix = "nilmdb-"
+    cfg.parentdir_prefix = "nilmdb-"
+    cfg.versionfile_source = "nilmdb/_version.py"
+    cfg.verbose = False
+    return cfg
+
+
+class NotThisMethod(Exception):
+    """Exception raised if a method is not valid for the current scenario."""
+
+
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method):  # decorator
+    """Decorator to mark a method as the handler for a particular VCS."""
+    def decorate(f):
+        """Store f in HANDLERS[vcs][method]."""
+        if vcs not in HANDLERS:
+            HANDLERS[vcs] = {}
+        HANDLERS[vcs][method] = f
+        return f
+    return decorate
+
+
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
+                env=None):
+    """Call the given command(s)."""
+    assert isinstance(commands, list)
+    p = None
+    for c in commands:
+        try:
+            dispcmd = str([c] + args)
+            # remember shell=False, so use git.cmd on windows, not just git
+            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
+                                 stdout=subprocess.PIPE,
+                                 stderr=(subprocess.PIPE if hide_stderr
+                                         else None))
+            break
+        except EnvironmentError:
+            e = sys.exc_info()[1]
+            if e.errno == errno.ENOENT:
+                continue
+            if verbose:
+                print("unable to run %s" % dispcmd)
+                print(e)
+            return None, None
+    else:
+        if verbose:
+            print("unable to find command, tried %s" % (commands,))
+        return None, None
+    stdout = p.communicate()[0].strip()
+    if sys.version_info[0] >= 3:
+        stdout = stdout.decode()
+    if p.returncode != 0:
+        if verbose:
+            print("unable to run %s (error)" % dispcmd)
+            print("stdout was %s" % stdout)
+        return None, p.returncode
+    return stdout, p.returncode
+
+
+def versions_from_parentdir(parentdir_prefix, root, verbose):
+    """Try to determine the version from the parent directory name.
+
+    Source tarballs conventionally unpack into a directory that includes both
+    the project name and a version string. We will also support searching up
+    two directory levels for an appropriately named parent directory
+    """
+    rootdirs = []
+
+    for i in range(3):
+        dirname = os.path.basename(root)
+        if dirname.startswith(parentdir_prefix):
+            return {"version": dirname[len(parentdir_prefix):],
+                    "full-revisionid": None,
+                    "dirty": False, "error": None, "date": None}
+        else:
+            rootdirs.append(root)
+            root = os.path.dirname(root)  # up a level
+
+    if verbose:
+        print("Tried directories %s but none started with prefix %s" %
+              (str(rootdirs), parentdir_prefix))
+    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+
+
+@register_vcs_handler("git", "get_keywords")
+def git_get_keywords(versionfile_abs):
+    """Extract version information from the given file."""
+    # the code embedded in _version.py can just fetch the value of these
+    # keywords. When used from setup.py, we don't want to import _version.py,
+    # so we do it with a regexp instead. This function is not used from
+    # _version.py.
+    keywords = {}
+    try:
+        f = open(versionfile_abs, "r")
+        for line in f.readlines():
+            if line.strip().startswith("git_refnames ="):
+                mo = re.search(r'=\s*"(.*)"', line)
+                if mo:
+                    keywords["refnames"] = mo.group(1)
+            if line.strip().startswith("git_full ="):
+                mo = re.search(r'=\s*"(.*)"', line)
+                if mo:
+                    keywords["full"] = mo.group(1)
+            if line.strip().startswith("git_date ="):
+                mo = re.search(r'=\s*"(.*)"', line)
+                if mo:
+                    keywords["date"] = mo.group(1)
+        f.close()
+    except EnvironmentError:
+        pass
+    return keywords
+
+
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+    """Get version information from git keywords."""
+    if not keywords:
+        raise NotThisMethod("no keywords at all, weird")
+    date = keywords.get("date")
+    if date is not None:
+        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
+        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
+        # -like" string, which we must then edit to make compliant), because
+        # it's been around since git-1.5.3, and it's too difficult to
+        # discover which version we're using, or to work around using an
+        # older one.
+        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+    refnames = keywords["refnames"].strip()
+    if refnames.startswith("$Format"):
+        if verbose:
+            print("keywords are unexpanded, not using")
+        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
+    refs = set([r.strip() for r in refnames.strip("()").split(",")])
+    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+    TAG = "tag: "
+    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+    if not tags:
+        # Either we're using git < 1.8.3, or there really are no tags. We use
+        # a heuristic: assume all version tags have a digit. The old git %d
+        # expansion behaves like git log --decorate=short and strips out the
+        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+        # between branches and tags. By ignoring refnames without digits, we
+        # filter out many common branch names like "release" and
+        # "stabilization", as well as "HEAD" and "master".
+        tags = set([r for r in refs if re.search(r'\d', r)])
+        if verbose:
+            print("discarding '%s', no digits" % ",".join(refs - tags))
+    if verbose:
+        print("likely tags: %s" % ",".join(sorted(tags)))
+    for ref in sorted(tags):
+        # sorting will prefer e.g. "2.0" over "2.0rc1"
+        if ref.startswith(tag_prefix):
+            r = ref[len(tag_prefix):]
+            if verbose:
+                print("picking %s" % r)
+            return {"version": r,
+                    "full-revisionid": keywords["full"].strip(),
+                    "dirty": False, "error": None,
+                    "date": date}
+    # no suitable tags, so version is "0+unknown", but full hex is still there
+    if verbose:
+        print("no suitable tags, using unknown + full revision id")
+    return {"version": "0+unknown",
+            "full-revisionid": keywords["full"].strip(),
+            "dirty": False, "error": "no suitable tags", "date": None}
+
+
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+    """Get version from 'git describe' in the root of the source tree.
+
+    This only gets called if the git-archive 'subst' keywords were *not*
+    expanded, and _version.py hasn't already been rewritten with a short
+    version string, meaning we're inside a checked out source tree.
+    """
+    GITS = ["git"]
+    if sys.platform == "win32":
+        GITS = ["git.cmd", "git.exe"]
+
+    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
+                          hide_stderr=True)
+    if rc != 0:
+        if verbose:
+            print("Directory %s not under git control" % root)
+        raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+    # if there isn't one, this yields HEX[-dirty] (no NUM)
+    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
+                                          "--always", "--long",
+                                          "--match", "%s*" % tag_prefix],
+                                   cwd=root)
+    # --long was added in git-1.5.5
+    if describe_out is None:
+        raise NotThisMethod("'git describe' failed")
+    describe_out = describe_out.strip()
+    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+    if full_out is None:
+        raise NotThisMethod("'git rev-parse' failed")
+    full_out = full_out.strip()
+
+    pieces = {}
+    pieces["long"] = full_out
+    pieces["short"] = full_out[:7]  # maybe improved later
+    pieces["error"] = None
+
+    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+    # TAG might have hyphens.
+    git_describe = describe_out
+
+    # look for -dirty suffix
+    dirty = git_describe.endswith("-dirty")
+    pieces["dirty"] = dirty
+    if dirty:
+        git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+    # now we have TAG-NUM-gHEX or HEX
+
+    if "-" in git_describe:
+        # TAG-NUM-gHEX
+        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+        if not mo:
+            # unparseable. Maybe git-describe is misbehaving?
+            pieces["error"] = ("unable to parse git-describe output: '%s'"
+                               % describe_out)
+            return pieces
+
+        # tag
+        full_tag = mo.group(1)
+        if not full_tag.startswith(tag_prefix):
+            if verbose:
+                fmt = "tag '%s' doesn't start with prefix '%s'"
+                print(fmt % (full_tag, tag_prefix))
+            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
+                               % (full_tag, tag_prefix))
+            return pieces
+        pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+        # distance: number of commits since tag
+        pieces["distance"] = int(mo.group(2))
+
+        # commit: short hex revision ID
+        pieces["short"] = mo.group(3)
+
+    else:
+        # HEX: no tags
+        pieces["closest-tag"] = None
+        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
+                                    cwd=root)
+        pieces["distance"] = int(count_out)  # total number of commits
+
+    # commit date: see ISO-8601 comment in git_versions_from_keywords()
+    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
+                       cwd=root)[0].strip()
+    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+    return pieces
+
+
+def plus_or_dot(pieces):
+    """Return a + if we don't already have one, else return a ."""
+    if "+" in pieces.get("closest-tag", ""):
+        return "."
+    return "+"
+
+
+def render_pep440(pieces):
+    """Build up version string, with post-release "local version identifier".
+
+    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+    Exceptions:
+    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%d" % pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Eexceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always -long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render(pieces, style):
+    """Render the given version pieces into the requested style."""
+    if pieces["error"]:
+        return {"version": "unknown",
+                "full-revisionid": pieces.get("long"),
+                "dirty": None,
+                "error": pieces["error"],
+                "date": None}
+
+    if not style or style == "default":
+        style = "pep440"  # the default
+
+    if style == "pep440":
+        rendered = render_pep440(pieces)
+    elif style == "pep440-pre":
+        rendered = render_pep440_pre(pieces)
+    elif style == "pep440-post":
+        rendered = render_pep440_post(pieces)
+    elif style == "pep440-old":
+        rendered = render_pep440_old(pieces)
+    elif style == "git-describe":
+        rendered = render_git_describe(pieces)
+    elif style == "git-describe-long":
+        rendered = render_git_describe_long(pieces)
+    else:
+        raise ValueError("unknown style '%s'" % style)
+
+    return {"version": rendered, "full-revisionid": pieces["long"],
+            "dirty": pieces["dirty"], "error": None,
+            "date": pieces.get("date")}
+
+
+def get_versions():
+    """Get version information or return default if unable to do so."""
+    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+    # __file__, we can work backwards from there to the root. Some
+    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+    # case we can only use expanded keywords.
+
+    cfg = get_config()
+    verbose = cfg.verbose
+
+    try:
+        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
+                                          verbose)
+    except NotThisMethod:
+        pass
+
+    try:
+        root = os.path.realpath(__file__)
+        # versionfile_source is the relative path from the top of the source
+        # tree (where the .git directory might live) to this file. Invert
+        # this to find the root from __file__.
+        for i in cfg.versionfile_source.split('/'):
+            root = os.path.dirname(root)
+    except NameError:
+        return {"version": "0+unknown", "full-revisionid": None,
+                "dirty": None,
+                "error": "unable to find root of source tree",
+                "date": None}
+
+    try:
+        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
+        return render(pieces, cfg.style)
+    except NotThisMethod:
+        pass
+
+    try:
+        if cfg.parentdir_prefix:
+            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+    except NotThisMethod:
+        pass
+
+    return {"version": "0+unknown", "full-revisionid": None,
+            "dirty": None,
+            "error": "unable to compute version", "date": None}
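To make the rendering styles above concrete, here is a small illustration of how `render_pep440` and `render_git_describe` turn a `pieces` dict into a version string; the tag, distance and hash values are invented:

    # Sample pieces, as git_pieces_from_vcs() might return them for a
    # dirty checkout that is five commits past a hypothetical
    # "nilmdb-2.0.3" tag.
    pieces = {"closest-tag": "2.0.3", "distance": 5, "short": "abcdef1",
              "dirty": True, "long": "abcdef1deadbeef", "error": None,
              "date": "2020-08-17T09:00:00-0400"}

    print(render_pep440(pieces))        # 2.0.3+5.gabcdef1.dirty
    print(render_git_describe(pieces))  # 2.0.3-5-gabcdef1-dirty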
@ -2,21 +2,21 @@
|
||||||
|
|
||||||
"""Class for performing HTTP client requests via libcurl"""
|
"""Class for performing HTTP client requests via libcurl"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import contextlib
|
||||||
|
|
||||||
import nilmdb.utils
|
import nilmdb.utils
|
||||||
import nilmdb.client.httpclient
|
import nilmdb.client.httpclient
|
||||||
from nilmdb.client.errors import ClientError
|
from nilmdb.client.errors import ClientError
|
||||||
|
|
||||||
import time
|
|
||||||
import simplejson as json
|
|
||||||
import contextlib
|
|
||||||
|
|
||||||
from nilmdb.utils.time import timestamp_to_string, string_to_timestamp
|
from nilmdb.utils.time import timestamp_to_string, string_to_timestamp
|
||||||
|
|
||||||
|
|
||||||
def extract_timestamp(line):
|
def extract_timestamp(line):
|
||||||
"""Extract just the timestamp from a line of data text"""
|
"""Extract just the timestamp from a line of data text"""
|
||||||
return string_to_timestamp(line.split()[0])
|
return string_to_timestamp(line.split()[0])
|
||||||
|
|
||||||
class Client(object):
|
|
||||||
|
class Client():
|
||||||
"""Main client interface to the Nilm database."""
|
"""Main client interface to the Nilm database."""
|
||||||
|
|
||||||
def __init__(self, url, post_json=False):
|
def __init__(self, url, post_json=False):
|
||||||
|
@ -99,19 +99,25 @@ class Client(object):
|
||||||
|
|
||||||
def stream_create(self, path, layout):
|
def stream_create(self, path, layout):
|
||||||
"""Create a new stream"""
|
"""Create a new stream"""
|
||||||
params = { "path": path,
|
params = {
|
||||||
"layout" : layout }
|
"path": path,
|
||||||
|
"layout": layout
|
||||||
|
}
|
||||||
return self.http.post("stream/create", params)
|
return self.http.post("stream/create", params)
|
||||||
|
|
||||||
def stream_destroy(self, path):
|
def stream_destroy(self, path):
|
||||||
"""Delete stream. Fails if any data is still present."""
|
"""Delete stream. Fails if any data is still present."""
|
||||||
params = { "path": path }
|
params = {
|
||||||
|
"path": path
|
||||||
|
}
|
||||||
         return self.http.post("stream/destroy", params)

     def stream_rename(self, oldpath, newpath):
         """Rename a stream."""
-        params = { "oldpath": oldpath,
-                   "newpath": newpath }
+        params = {
+            "oldpath": oldpath,
+            "newpath": newpath
+        }
         return self.http.post("stream/rename", params)

     def stream_remove(self, path, start=None, end=None):

@@ -139,8 +145,8 @@ class Client(object):

         Example:
             with client.stream_insert_context('/path', start, end) as ctx:
-                ctx.insert('1234567890.0 1 2 3 4\\n')
-                ctx.insert('1234567891.0 1 2 3 4\\n')
+                ctx.insert('1234567890000000 1 2 3 4\\n')
+                ctx.insert('1234567891000000 1 2 3 4\\n')

         For more details, see help for nilmdb.client.client.StreamInserter

@@ -159,7 +165,7 @@ class Client(object):
         so it will be broken into reasonably-sized chunks and
         start/end will be deduced if missing."""
         with self.stream_insert_context(path, start, end) as ctx:
-            if isinstance(data, basestring):
+            if isinstance(data, bytes):
                 ctx.insert(data)
             else:
                 for chunk in data:

@@ -181,7 +187,7 @@ class Client(object):
         }
         if binary:
             params["binary"] = 1
-        return self.http.put("stream/insert", data, params, binary = binary)
+        return self.http.put("stream/insert", data, params)

     def stream_intervals(self, path, start=None, end=None, diffpath=None):
         """

@@ -242,7 +248,8 @@ class Client(object):
         counts = list(self.stream_extract(path, start, end, count=True))
         return int(counts[0])

-class StreamInserter(object):
+
+class StreamInserter():
     """Object returned by stream_insert_context() that manages
     the insertion of rows of data into a particular path.

@@ -331,7 +338,7 @@ class StreamInserter(object):
         # Send the block once we have enough data
         if self._block_len >= maxdata:
             self._send_block(final=False)
-            if self._block_len >= self._max_data_after_send: # pragma: no cover
+            if self._block_len >= self._max_data_after_send:
                 raise ValueError("too much data left over after trying"
                                  " to send intermediate block; is it"
                                  " missing newlines or malformed?")

@@ -370,10 +377,10 @@ class StreamInserter(object):
        there isn't one."""
        start = 0
        while True:
-           end = block.find('\n', start)
+           end = block.find(b'\n', start)
            if end < 0:
                raise IndexError
-           if block[start] != '#':
+           if block[start] != b'#'[0]:
                return (start, (end + 1))
            start = end + 1

@@ -381,12 +388,12 @@ class StreamInserter(object):
        """Return the (start, end) indices of the last full line in
        block[:length] that isn't a comment, or raise IndexError if
        there isn't one."""
-       end = block.rfind('\n')
+       end = block.rfind(b'\n')
        if end <= 0:
            raise IndexError
        while True:
-           start = block.rfind('\n', 0, end)
-           if block[start + 1] != '#':
+           start = block.rfind(b'\n', 0, end)
+           if block[start + 1] != b'#'[0]:
                return ((start + 1), end)
            if start == -1:
                raise IndexError

@@ -396,7 +403,7 @@ class StreamInserter(object):
        """Send data currently in the block.  The data sent will
        consist of full lines only, so some might be left over."""
        # Build the full string to send
-       block = "".join(self._block_data)
+       block = b"".join(self._block_data)

        start_ts = self._interval_start
        if start_ts is None:

@@ -413,7 +420,7 @@ class StreamInserter(object):
        # or the timestamp of the last line plus epsilon.
        end_ts = self._interval_end
        try:
-           if block[-1] != '\n':
+           if block[-1] != b'\n'[0]:
                raise ValueError("final block didn't end with a newline")
            if end_ts is None:
                (spos, epos) = self._get_last_noncomment(block)
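
Aside: the `b'#'[0]` and `b'\n'[0]` comparisons in this hunk rely on a Python 3 semantic that is easy to miss. A standalone illustration (not part of this diff; the sample data is made up):

    # In Python 3, indexing a bytes object yields an int, not a
    # length-1 bytes, so single-character tests must compare ints.
    block = b"# comment\n1234567890000000 1 2 3 4\n"
    assert block[0] == b'#'[0]       # int == int (35): True
    assert block[0] != '#'           # an int never equals a str
    assert block.find(b'\n') == 9    # search patterns must be bytes too
    assert block[0:1] == b'#'        # slicing is the bytes-valued alternative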
@@ -1,6 +1,7 @@
 """HTTP client errors"""

-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import sprintf

+
 class Error(Exception):
     """Base exception for both ClientError and ServerError responses"""

@@ -9,25 +10,32 @@ class Error(Exception):
                  message=None,
                  url=None,
                  traceback=None):
-        Exception.__init__(self, status)
+        super().__init__(status)
         self.status = status        # e.g. "400 Bad Request"
         self.message = message      # textual message from the server
         self.url = url              # URL we were requesting
         self.traceback = traceback  # server traceback, if available

     def _format_error(self, show_url):
         s = sprintf("[%s]", self.status)
         if self.message:
             s += sprintf(" %s", self.message)
-        if show_url and self.url: # pragma: no cover
+        if show_url and self.url:
             s += sprintf(" (%s)", self.url)
-        if self.traceback: # pragma: no cover
+        if self.traceback:
             s += sprintf("\nServer traceback:\n%s", self.traceback)
         return s

     def __str__(self):
         return self._format_error(show_url=False)
-    def __repr__(self): # pragma: no cover
+
+    def __repr__(self):
         return self._format_error(show_url=True)


 class ClientError(Error):
     pass


 class ServerError(Error):
     pass
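
A hedged usage sketch of the error classes above (the status, message, and URL values here are hypothetical): per `__str__` and `__repr__` as defined, `str()` hides the request URL while `repr()` includes it.

    from nilmdb.client.errors import ClientError  # assumes nilmdb is installed

    e = ClientError(status="400 Bad Request",
                    message="no such stream",                   # made-up value
                    url="http://localhost/nilmdb/stream/list")  # made-up value
    print(str(e))   # [400 Bad Request] no such stream
    print(repr(e))  # [400 Bad Request] no such stream (http://localhost/...)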
@@ -1,21 +1,21 @@
 """HTTP client library"""

-import nilmdb.utils
-from nilmdb.client.errors import ClientError, ServerError, Error
+import json
+import urllib.parse

-import simplejson as json
-import urlparse
 import requests

-class HTTPClient(object):
+from nilmdb.client.errors import ClientError, ServerError, Error
+
+
+class HTTPClient():
     """Class to manage and perform HTTP requests from the client"""
     def __init__(self, baseurl="", post_json=False, verify_ssl=True):
         """If baseurl is supplied, all other functions that take
         a URL can be given a relative URL instead."""
         # Verify / clean up URL
-        reparsed = urlparse.urlparse(baseurl).geturl()
+        reparsed = urllib.parse.urlparse(baseurl).geturl()
         if '://' not in reparsed:
-            reparsed = urlparse.urlparse("http://" + baseurl).geturl()
+            reparsed = urllib.parse.urlparse("http://" + baseurl).geturl()
         self.baseurl = reparsed.rstrip('/') + '/'

         # Note whether we want SSL verification

@@ -32,25 +32,27 @@ class HTTPClient(object):
         # Default variables for exception.  We use the entire body as
         # the default message, in case we can't extract it from a JSON
         # response.
-        args = { "url" : url,
-                 "status": str(code),
-                 "message": body,
-                 "traceback" : None }
+        args = {
+            "url": url,
+            "status": str(code),
+            "message": body,
+            "traceback": None
+        }
         try:
             # Fill with server-provided data if we can
             jsonerror = json.loads(body)
             args["status"] = jsonerror["status"]
             args["message"] = jsonerror["message"]
             args["traceback"] = jsonerror["traceback"]
-        except Exception: # pragma: no cover
+        except Exception:
             pass
-        if code >= 400 and code <= 499:
+        if 400 <= code <= 499:
             raise ClientError(**args)
-        else: # pragma: no cover
-            if code >= 500 and code <= 599:
+        else:
+            if 500 <= code <= 599:
                 if args["message"] is None:
-                    args["message"] = ("(no message; try disabling " +
-                                       "response.stream option in " +
+                    args["message"] = ("(no message; try disabling "
+                                       "response.stream option in "
                                        "nilmdb.server for better debugging)")
                 raise ServerError(**args)
             else:

@@ -60,7 +62,7 @@ class HTTPClient(object):
             pass

     def _do_req(self, method, url, query_data, body_data, stream, headers):
-        url = urlparse.urljoin(self.baseurl, url)
+        url = urllib.parse.urljoin(self.baseurl, url)
         try:
             # Create a new session, ensure we send "Connection: close",
             # and explicitly close connection after the transfer.

@@ -87,7 +89,7 @@ class HTTPClient(object):
             session.close()
         except requests.RequestException as e:
             raise ServerError(status="502 Error", url=url,
-                              message = str(e.message))
+                              message=str(e))
         if response.status_code != 200:
             self._handle_error(url, response.status_code, response.content)
         self._last_response = response

@@ -107,7 +109,7 @@ class HTTPClient(object):
                             stream=False, headers=headers)
         if isjson:
             return json.loads(response.content)
-        return response.content
+        return response.text

     def get(self, url, params=None):
         """Simple GET (parameters in URL)"""

@@ -122,12 +124,10 @@ class HTTPClient(object):
         else:
             return self._req("POST", url, None, params)

-    def put(self, url, data, params = None, binary = False):
+    def put(self, url, data, params=None,
+            content_type="application/octet-stream"):
         """Simple PUT (parameters in URL, data in body)"""
-        if binary:
-            h = { 'Content-type': 'application/octet-stream' }
-        else:
-            h = { 'Content-type': 'text/plain; charset=utf-8' }
+        h = {'Content-type': content_type}
         return self._req("PUT", url, query=params, body=data, headers=h)

     # Generator versions that return data one line at a time.

@@ -156,7 +156,7 @@ class HTTPClient(object):
                 pending = tmp[-1]
             for line in lines:
                 yield line
-            if pending is not None: # pragma: no cover (missing newline)
+            if pending is not None:
                 yield pending

         # Yield the chunks or lines as requested

@@ -165,11 +165,11 @@ class HTTPClient(object):
                 yield chunk
         elif isjson:
             for line in lines(response.iter_content(chunk_size=1),
-                              ending = '\r\n'):
+                              ending=b'\r\n'):
                 yield json.loads(line)
         else:
             for line in lines(response.iter_content(chunk_size=65536),
-                              ending = '\n'):
+                              ending=b'\n'):
                 yield line

     def get_gen(self, url, params=None, binary=False):
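
Two Python 3 idioms recur in this file's hunks; a minimal standalone demonstration (not nilmdb code):

    import urllib.parse

    # urlparse moved into urllib.parse in Python 3.
    u = urllib.parse.urlparse("http://localhost/nilmdb")
    assert u.geturl() == "http://localhost/nilmdb"

    # Chained comparisons replace the two-clause range tests.
    for code in (399, 400, 499, 500):
        assert (400 <= code <= 499) == (code >= 400 and code <= 499)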
@@ -3,29 +3,33 @@
 """Provide a NumpyClient class that is based on normal Client, but has
 additional methods for extracting and inserting data via Numpy arrays."""

+import contextlib
+
+import numpy
+
 import nilmdb.utils
 import nilmdb.client.client
 import nilmdb.client.httpclient
 from nilmdb.client.errors import ClientError

-import contextlib
-from nilmdb.utils.time import timestamp_to_string, string_to_timestamp
-
-import numpy
-import cStringIO

 def layout_to_dtype(layout):
     ltype = layout.split('_')[0]
     lcount = int(layout.split('_')[1])
     if ltype.startswith('int'):
-        atype = '<i' + str(int(ltype[3:]) / 8)
+        atype = '<i' + str(int(ltype[3:]) // 8)
     elif ltype.startswith('uint'):
-        atype = '<u' + str(int(ltype[4:]) / 8)
+        atype = '<u' + str(int(ltype[4:]) // 8)
     elif ltype.startswith('float'):
-        atype = '<f' + str(int(ltype[5:]) / 8)
+        atype = '<f' + str(int(ltype[5:]) // 8)
     else:
         raise ValueError("bad layout")
-    return numpy.dtype([('timestamp', '<i8'), ('data', atype, lcount)])
+    if lcount == 1:
+        dtype = [('timestamp', '<i8'), ('data', atype)]
+    else:
+        dtype = [('timestamp', '<i8'), ('data', atype, lcount)]
+    return numpy.dtype(dtype)


 class NumpyClient(nilmdb.client.client.Client):
     """Subclass of nilmdb.client.Client that adds additional methods for

@@ -55,7 +59,7 @@ class NumpyClient(nilmdb.client.client.Client):
         dtype = self._get_dtype(path, layout)

         def to_numpy(data):
-            a = numpy.fromstring(data, dtype)
+            a = numpy.frombuffer(data, dtype)
             if structured:
                 return a
             return numpy.c_[a['timestamp'], a['data']]

@@ -70,14 +74,14 @@ class NumpyClient(nilmdb.client.client.Client):

             # See if we have enough to make the requested Numpy array
             while total_len >= maxsize:
-                assembled = "".join(chunks)
+                assembled = b"".join(chunks)
                 total_len -= maxsize
                 chunks = [assembled[maxsize:]]
                 block = assembled[:maxsize]
                 yield to_numpy(block)

         if total_len:
-            yield to_numpy("".join(chunks))
+            yield to_numpy(b"".join(chunks))

     @contextlib.contextmanager
     def stream_insert_numpy_context(self, path, start=None, end=None,

@@ -114,6 +118,7 @@ class NumpyClient(nilmdb.client.client.Client):
                 ctx.insert(chunk)
             return ctx.last_response

+
 class StreamInserterNumpy(nilmdb.client.client.StreamInserter):
     """Object returned by stream_insert_numpy_context() that manages
     the insertion of rows of data into a particular path.

@@ -146,7 +151,7 @@ class StreamInserterNumpy(nilmdb.client.client.StreamInserter):

     def insert(self, array):
         """Insert Numpy data, which must match the layout type."""
-        if type(array) != numpy.ndarray:
+        if not isinstance(array, numpy.ndarray):
             array = numpy.array(array)
         if array.ndim == 1:
             # Already a structured array; just verify the type

@@ -246,7 +251,7 @@ class StreamInserterNumpy(nilmdb.client.client.StreamInserter):
         # If we have no endpoints, or equal endpoints, it's OK as long
         # as there's no data to send
         if (start_ts is None or end_ts is None) or (start_ts == end_ts):
-            if len(array) == 0:
+            if not array:
                 return
             raise ClientError("have data to send, but invalid start/end times")
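
The `//` and `lcount == 1` changes in `layout_to_dtype()` matter for two reasons: in Python 3, `/` returns a float (so `'<f' + str(32 / 8)` builds the invalid dtype code `'<f4.0'`), and a scalar `data` field is more convenient than shape `(1,)`. An illustrative check, assuming numpy is available:

    import numpy

    # True division would build an invalid dtype code in Python 3:
    assert str(int('32') / 8) == '4.0'    # '<f4.0' -> numpy rejects this
    assert str(int('32') // 8) == '4'     # '<f4'   -> valid

    # With lcount == 1 the data field is scalar rather than shape (1,):
    dt1 = numpy.dtype([('timestamp', '<i8'), ('data', '<f4')])     # float32_1
    dt3 = numpy.dtype([('timestamp', '<i8'), ('data', '<f4', 3)])  # float32_3
    assert dt1['data'].shape == () and dt3['data'].shape == (3,)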
@@ -1,21 +1,17 @@
 """Command line client functionality"""

-import nilmdb.client
-
-from nilmdb.utils.printf import *
-from nilmdb.utils import datetime_tz
-import nilmdb.utils.time
-
-import sys
 import os
+import sys
+import signal
 import argparse
 from argparse import ArgumentDefaultsHelpFormatter as def_form
-import signal

-try: # pragma: no cover
+import nilmdb.client
+from nilmdb.utils.printf import fprintf, sprintf
+import nilmdb.utils.time
+
 import argcomplete
-except ImportError: # pragma: no cover
-    argcomplete = None
+import datetime_tz

 # Valid subcommands.  Defined in separate files just to break
 # things up -- they're still called with Cmdline as self.

@@ -27,6 +23,7 @@ subcmd_mods = {}
 for cmd in subcommands:
     subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist=[cmd])

+
 class JimArgumentParser(argparse.ArgumentParser):
     def parse_args(self, args=None, namespace=None):
         # Look for --version anywhere and change it to just "nilmtool

@@ -40,7 +37,8 @@
         self.print_usage(sys.stderr)
         self.exit(2, sprintf("error: %s\n", message))

-class Complete(object): # pragma: no cover
+
+class Complete():
     # Completion helpers, for using argcomplete (see
     # extras/nilmtool-bash-completion.sh)
     def escape(self, s):

@@ -68,7 +66,7 @@ class Complete(object): # pragma: no cover
         layouts = []
         for i in range(1, 10):
             layouts.extend([(t + "_" + str(i)) for t in types])
-        return ( l for l in layouts if l.startswith(prefix) )
+        return (lay for lay in layouts if lay.startswith(prefix))

     def meta_key(self, prefix, parsed_args, **kwargs):
         return (kv.split('=')[0] for kv

@@ -80,30 +78,22 @@
         if not path:
             return []
         results = []
-        # prefix comes in as UTF-8, but results need to be Unicode,
-        # weird.  Still doesn't work in all cases, but that's bugs in
-        # argcomplete.
-        prefix = nilmdb.utils.unicode.decode(prefix)
-        for (k,v) in client.stream_get_metadata(path).iteritems():
+        for (k, v) in client.stream_get_metadata(path).items():
             kv = self.escape(k + '=' + v)
             if kv.startswith(prefix):
                 results.append(kv)
         return results

-class Cmdline(object):
+
+class Cmdline():

     def __init__(self, argv=None):
         self.argv = argv or sys.argv[1:]
-        try:
-            # Assume command line arguments are encoded with stdin's encoding,
-            # and reverse it.  Won't be needed in Python 3, but for now..
-            self.argv = [ x.decode(sys.stdin.encoding) for x in self.argv ]
-        except Exception: # pragma: no cover
-            pass
         self.client = None
         self.def_url = os.environ.get("NILMDB_URL", "http://localhost/nilmdb/")
         self.subcmd = {}
         self.complete = Complete()
+        self.complete_output_stream = None  # overridden by test suite

     def arg_time(self, toparse):
         """Parse a time string argument"""

@@ -131,7 +121,7 @@ class Cmdline(object):
         ).completer = self.complete.url

         sub = self.parser.add_subparsers(
-            title="Commands", dest="command",
+            title="Commands", dest="command", required=True,
             description="Use 'help command' or 'command --help' for more "
             "details on a particular command.")

@@ -148,10 +138,7 @@ class Cmdline(object):
     def run(self):
         # Set SIGPIPE to its default handler -- we don't need Python
         # to catch it for us.
-        try:
-            signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-        except ValueError: # pragma: no cover
-            pass
+        signal.signal(signal.SIGPIPE, signal.SIG_DFL)

         # Clear cached timezone, so that we can pick up timezone changes
         # while running this from the test suite.

@@ -159,8 +146,8 @@ class Cmdline(object):

         # Run parser
         self.parser_setup()
-        if argcomplete: # pragma: no cover
-            argcomplete.autocomplete(self.parser)
+        argcomplete.autocomplete(self.parser, exit_method=sys.exit,
+                                 output_stream=self.complete_output_stream)
         self.args = self.parser.parse_args(self.argv)

         # Run arg verify handler if there is one

@@ -173,7 +160,7 @@ class Cmdline(object):
         # unless the particular command requests that we don't.
         if "no_test_connect" not in self.args:
             try:
-                server_version = self.client.version()
+                self.client.version()
             except nilmdb.client.Error as e:
                 self.die("error connecting to server: %s", str(e))
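
The `required=True` added to `add_subparsers()` above compensates for an argparse behavior change: in Python 3, subparsers are optional by default, so a bare invocation with no subcommand would otherwise parse successfully with `args.command == None`. A small standalone sketch of the difference:

    import argparse

    p = argparse.ArgumentParser()
    sub = p.add_subparsers(title="Commands", dest="command", required=True)
    sub.add_parser("list")

    try:
        p.parse_args([])  # no subcommand given
    except SystemExit:
        # exits with: "error: the following arguments are required: command"
        pass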
@@ -1,7 +1,7 @@
-from nilmdb.utils.printf import *
+from argparse import RawDescriptionHelpFormatter as raw_form

 import nilmdb.client

-from argparse import RawDescriptionHelpFormatter as raw_form

 def setup(self, sub):
     cmd = sub.add_parser("create", help="Create a new stream",

@@ -29,6 +29,7 @@ Layout types are of the format: type_count
     ).completer = self.complete.layout
     return cmd

+
 def cmd_create(self):
     """Create new stream"""
     try:
@@ -1,9 +1,11 @@
-from nilmdb.utils.printf import *
-import nilmdb.client
 import fnmatch

 from argparse import ArgumentDefaultsHelpFormatter as def_form

+from nilmdb.utils.printf import printf
+import nilmdb.client
+

 def setup(self, sub):
     cmd = sub.add_parser("destroy", help="Delete a stream and all data",
                          formatter_class=def_form,

@@ -27,6 +29,7 @@ def setup(self, sub):
     ).completer = self.complete.path
     return cmd

+
 def cmd_destroy(self):
     """Destroy stream"""
     streams = [s[0] for s in self.client.stream_list()]

@@ -43,7 +46,7 @@ def cmd_destroy(self):

         try:
             if self.args.remove:
-                count = self.client.stream_remove(path)
+                self.client.stream_remove(path)
             self.client.stream_destroy(path)
         except nilmdb.client.ClientError as e:
             self.die("error destroying stream: %s", str(e))
@@ -1,8 +1,9 @@
-from __future__ import print_function
-from nilmdb.utils.printf import *
-import nilmdb.client
 import sys

+from nilmdb.utils.printf import printf
+import nilmdb.client
+

 def setup(self, sub):
     cmd = sub.add_parser("extract", help="Extract data",
                          description="""

@@ -40,8 +41,8 @@ def setup(self, sub):
                      help="Just output a count of matched data points")
     return cmd

+
 def cmd_extract_verify(self):
-    if self.args.start is not None and self.args.end is not None:
-        if self.args.start > self.args.end:
-            self.parser.error("start is after end")
+    if self.args.start > self.args.end:
+        self.parser.error("start is after end")

@@ -50,6 +51,7 @@ def cmd_extract_verify(self):
             self.args.timestamp_raw or self.args.count):
         self.parser.error("--binary cannot be combined with other options")

+
 def cmd_extract(self):
     streams = self.client.stream_list(self.args.path)
     if len(streams) != 1:

@@ -69,9 +71,9 @@ def cmd_extract(self):

     printed = False
     if self.args.binary:
-        printer = sys.stdout.write
+        printer = sys.stdout.buffer.write
     else:
-        printer = print
+        printer = lambda x: print(x.decode('utf-8'))
     bare = self.args.bare
     count = self.args.count
     for dataline in self.client.stream_extract(self.args.path,

@@ -83,7 +85,7 @@ def cmd_extract(self):
         if bare and not count:
             # Strip timestamp (first element).  Doesn't make sense
             # if we are only returning a count.
-            dataline = ' '.join(dataline.split(' ')[1:])
+            dataline = b' '.join(dataline.split(b' ')[1:])
         printer(dataline)
         printed = True
     if not printed:
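
The printer change reflects Python 3's split between the text and binary layers of standard output; a standalone illustration:

    import sys

    sys.stdout.write("text line\n")           # str goes to the text layer
    sys.stdout.buffer.write(b"raw bytes\n")   # bytes go to the buffer layer
    # Mixing them up raises TypeError in Python 3:
    #   sys.stdout.write(b"...")        -> TypeError
    #   sys.stdout.buffer.write("...")  -> TypeError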
@@ -1,7 +1,5 @@
-from nilmdb.utils.printf import *
-
 import argparse
-import sys


 def setup(self, sub):
     cmd = sub.add_parser("help", help="Show detailed help for a command",

@@ -17,6 +15,7 @@ def setup(self, sub):
                      help=argparse.SUPPRESS)
     return cmd

+
 def cmd_help(self):
     if self.args.command in self.subcmd:
         self.subcmd[self.args.command].print_help()
@ -1,8 +1,9 @@
|
||||||
|
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
||||||
|
|
||||||
import nilmdb.client
|
import nilmdb.client
|
||||||
from nilmdb.utils.printf import *
|
from nilmdb.utils.printf import printf
|
||||||
from nilmdb.utils import human_size
|
from nilmdb.utils import human_size
|
||||||
|
|
||||||
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("info", help="Server information",
|
cmd = sub.add_parser("info", help="Server information",
|
||||||
|
@ -14,6 +15,7 @@ def setup(self, sub):
|
||||||
cmd.set_defaults(handler=cmd_info)
|
cmd.set_defaults(handler=cmd_info)
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_info(self):
|
def cmd_info(self):
|
||||||
"""Print info about the server"""
|
"""Print info about the server"""
|
||||||
printf("Client version: %s\n", nilmdb.__version__)
|
printf("Client version: %s\n", nilmdb.__version__)
|
||||||
|
|
|
@@ -1,9 +1,10 @@
-from nilmdb.utils.printf import *
+import sys

+from nilmdb.utils.printf import printf
 import nilmdb.client
 import nilmdb.utils.timestamper as timestamper
 import nilmdb.utils.time

-import sys

 def setup(self, sub):
     cmd = sub.add_parser("insert", help="Insert data",

@@ -65,17 +66,20 @@ def setup(self, sub):
                      help="File to insert (default: - (stdin))")
     return cmd

+
 def cmd_insert_verify(self):
     if self.args.timestamp:
         if not self.args.rate:
             self.die("error: --rate is needed, but was not specified")
         if not self.args.filename and self.args.start is None:
-            self.die("error: need --start or --filename when adding timestamps")
+            self.die("error: need --start or --filename "
+                     "when adding timestamps")
     else:
         if self.args.start is None or self.args.end is None:
             self.die("error: when not adding timestamps, --start and "
                      "--end are required")

+
 def cmd_insert(self):
     # Find requested stream
     streams = self.client.stream_list(self.args.path)

@@ -87,7 +91,7 @@ def cmd_insert(self):
     try:
         filename = arg.file
         if filename == '-':
-            infile = sys.stdin
+            infile = sys.stdin.buffer
         else:
             try:
                 infile = open(filename, "rb")

@@ -104,7 +108,7 @@ def cmd_insert(self):
         if arg.timestamp:
             data = timestamper.TimestamperRate(infile, arg.start, arg.rate)
         else:
-            data = iter(lambda: infile.read(1048576), '')
+            data = iter(lambda: infile.read(1048576), b'')

         # Print info
         if not arg.quiet:
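
The `b''` sentinel above is essential: a binary file at EOF returns `b''`, and `iter(callable, sentinel)` only stops on an exact match, so the old `''` sentinel would loop forever under Python 3. A quick demonstration:

    import io

    infile = io.BytesIO(b"x" * (2 * 1048576 + 100))
    chunks = list(iter(lambda: infile.read(1048576), b''))
    assert [len(c) for c in chunks] == [1048576, 1048576, 100]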
@@ -1,10 +1,9 @@
-from nilmdb.utils.printf import *
+from argparse import ArgumentDefaultsHelpFormatter as def_form

+from nilmdb.utils.printf import printf
 import nilmdb.utils.time
 from nilmdb.utils.interval import Interval

-import fnmatch
-import argparse
-from argparse import ArgumentDefaultsHelpFormatter as def_form

 def setup(self, sub):
     cmd = sub.add_parser("intervals", help="List intervals",

@@ -48,11 +47,13 @@ def setup(self, sub):

     return cmd

+
 def cmd_intervals_verify(self):
     if self.args.start is not None and self.args.end is not None:
         if self.args.start >= self.args.end:
             self.parser.error("start must precede end")

+
 def cmd_intervals(self):
     """List intervals in a stream"""
     if self.args.timestamp_raw:

@@ -73,4 +74,3 @@ def cmd_intervals(self):

     except nilmdb.client.ClientError as e:
         self.die("error listing intervals: %s", str(e))
-
@@ -1,9 +1,9 @@
-from nilmdb.utils.printf import *
+import fnmatch
+from argparse import ArgumentDefaultsHelpFormatter as def_form

+from nilmdb.utils.printf import printf
 import nilmdb.utils.time

-import fnmatch
-import argparse
-from argparse import ArgumentDefaultsHelpFormatter as def_form

 def setup(self, sub):
     cmd = sub.add_parser("list", help="List streams",

@@ -50,6 +50,7 @@ def setup(self, sub):

     return cmd

+
 def cmd_list_verify(self):
     if self.args.start is not None and self.args.end is not None:
         if self.args.start >= self.args.end:

@@ -57,7 +58,9 @@ def cmd_list_verify(self):

     if self.args.start is not None or self.args.end is not None:
         if not self.args.detail:
-            self.parser.error("--start and --end only make sense with --detail")
+            self.parser.error("--start and --end only make sense "
+                              "with --detail")

+
 def cmd_list(self):
     """List available streams"""
@@ -1,7 +1,8 @@
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import printf
 import nilmdb
 import nilmdb.client

+
 def setup(self, sub):
     cmd = sub.add_parser("metadata", help="Get or set stream metadata",
                          description="""

@@ -36,15 +37,16 @@ def setup(self, sub):
     ).completer = self.complete.meta_key
     return cmd

+
 def cmd_metadata(self):
     """Manipulate metadata"""
     if self.args.set is not None or self.args.update is not None:
         # Either set, or update
         if self.args.set is not None:
-            keyvals = map(nilmdb.utils.unicode.decode, self.args.set)
+            keyvals = self.args.set
             handler = self.client.stream_set_metadata
         else:
-            keyvals = map(nilmdb.utils.unicode.decode, self.args.update)
+            keyvals = self.args.update
             handler = self.client.stream_update_metadata

         # Extract key=value pairs

@@ -64,7 +66,7 @@ def cmd_metadata(self):
         # Delete (by setting values to empty strings)
         keys = None
         if self.args.delete:
-            keys = map(nilmdb.utils.unicode.decode, self.args.delete)
+            keys = list(self.args.delete)
         try:
             data = self.client.stream_get_metadata(self.args.path, keys)
             for key in data:

@@ -76,7 +78,7 @@ def cmd_metadata(self):
         # Get (or unspecified)
         keys = None
         if self.args.get:
-            keys = map(nilmdb.utils.unicode.decode, self.args.get)
+            keys = list(self.args.get)
         try:
             data = self.client.stream_get_metadata(self.args.path, keys)
         except nilmdb.client.ClientError as e:

@@ -85,6 +87,4 @@ def cmd_metadata(self):
             # Print nonexistant keys as having empty value
             if value is None:
                 value = ""
-            printf("%s=%s\n",
-                   nilmdb.utils.unicode.encode(key),
-                   nilmdb.utils.unicode.encode(value))
+            printf("%s=%s\n", key, value)
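
The `list()` wrappers above guard against Python 3's lazy `map()`, which produces an iterator that can only be consumed once; passing an exhausted iterator onward would silently send nothing. For example:

    m = map(str.upper, ["a", "b"])
    assert list(m) == ["A", "B"]
    assert list(m) == []   # second pass: already exhausted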
@@ -1,13 +1,16 @@
-from nilmdb.utils.printf import *
-import nilmdb.client
 import fnmatch

+from nilmdb.utils.printf import printf
+import nilmdb.client
+

 def setup(self, sub):
     cmd = sub.add_parser("remove", help="Remove data",
                          description="""
                          Remove all data from a specified time range within a
-                         stream.  If multiple streams or wildcards are provided,
-                         the same time range is removed from all streams.
+                         stream.  If multiple streams or wildcards are
+                         provided, the same time range is removed from all
+                         streams.
                          """)
     cmd.set_defaults(handler=cmd_remove)

@@ -32,6 +35,7 @@ def setup(self, sub):
                      help="Output number of data points removed")
     return cmd

+
 def cmd_remove(self):
     streams = [s[0] for s in self.client.stream_list()]
     paths = []

@@ -48,7 +52,7 @@ def cmd_remove(self):
             count = self.client.stream_remove(path,
                                               self.args.start, self.args.end)
             if self.args.count:
-                printf("%d\n", count);
+                printf("%d\n", count)
         except nilmdb.client.ClientError as e:
             self.die("error removing data: %s", str(e))
@@ -1,7 +1,7 @@
-from nilmdb.utils.printf import *
+from argparse import ArgumentDefaultsHelpFormatter as def_form

 import nilmdb.client

-from argparse import ArgumentDefaultsHelpFormatter as def_form

 def setup(self, sub):
     cmd = sub.add_parser("rename", help="Rename a stream",

@@ -23,6 +23,7 @@ def setup(self, sub):

     return cmd

+
 def cmd_rename(self):
     """Rename a stream"""
     try:
@@ -1,5 +1,3 @@
 """nilmdb.fsck"""

-from __future__ import absolute_import
-
 from nilmdb.fsck.fsck import Fsck
@ -10,8 +10,7 @@ import nilmdb.server
|
||||||
import nilmdb.client.numpyclient
|
import nilmdb.client.numpyclient
|
||||||
from nilmdb.utils.interval import IntervalError
|
from nilmdb.utils.interval import IntervalError
|
||||||
from nilmdb.server.interval import Interval, IntervalSet
|
from nilmdb.server.interval import Interval, IntervalSet
|
||||||
from nilmdb.utils.printf import *
|
from nilmdb.utils.printf import printf, fprintf, sprintf
|
||||||
from nilmdb.utils.time import timestamp_to_string
|
|
||||||
|
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
@ -19,44 +18,55 @@ import os
|
||||||
import sys
|
import sys
|
||||||
import progressbar
|
import progressbar
|
||||||
import re
|
import re
|
||||||
import time
|
|
||||||
import shutil
|
import shutil
|
||||||
import cPickle as pickle
|
import pickle
|
||||||
import numpy
|
import numpy
|
||||||
|
|
||||||
|
|
||||||
class FsckError(Exception):
|
class FsckError(Exception):
|
||||||
def __init__(self, msg="", *args):
|
def __init__(self, msg="", *args):
|
||||||
if args:
|
if args:
|
||||||
msg = sprintf(msg, *args)
|
msg = sprintf(msg, *args)
|
||||||
Exception.__init__(self, msg)
|
Exception.__init__(self, msg)
|
||||||
|
|
||||||
|
|
||||||
class FixableFsckError(FsckError):
|
class FixableFsckError(FsckError):
|
||||||
def __init__(self, msg = "", *args):
|
def __init__(self, msg=""):
|
||||||
if args:
|
FsckError.__init__(self, f'{msg}\nThis may be fixable with "--fix".')
|
||||||
msg = sprintf(msg, *args)
|
|
||||||
FsckError.__init__(self, "%s\nThis may be fixable with \"--fix\".", msg)
|
|
||||||
class RetryFsck(FsckError):
|
class RetryFsck(FsckError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class FsckFormatError(FsckError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def log(format, *args):
|
def log(format, *args):
|
||||||
printf(format, *args)
|
printf(format, *args)
|
||||||
|
|
||||||
|
|
||||||
def err(format, *args):
|
def err(format, *args):
|
||||||
fprintf(sys.stderr, format, *args)
|
fprintf(sys.stderr, format, *args)
|
||||||
|
|
||||||
|
|
||||||
# Decorator that retries a function if it returns a specific value
|
# Decorator that retries a function if it returns a specific value
|
||||||
def retry_if_raised(exc, message = None, max_retries = 100):
|
def retry_if_raised(exc, message=None, max_retries=1000):
|
||||||
def f1(func):
|
def f1(func):
|
||||||
def f2(*args, **kwargs):
|
def f2(*args, **kwargs):
|
||||||
for n in range(max_retries):
|
for n in range(max_retries):
|
||||||
try:
|
try:
|
||||||
return func(*args, **kwargs)
|
return func(*args, **kwargs)
|
||||||
except exc as e:
|
except exc:
|
||||||
if message:
|
if message:
|
||||||
log("%s\n\n", message)
|
log(f"{message} ({n+1})\n\n")
|
||||||
raise Exception("Max number of retries (%d) exceeded; giving up")
|
raise Exception("Max number of retries (%d) exceeded; giving up" %
|
||||||
|
max_retries)
|
||||||
return f2
|
return f2
|
||||||
return f1
|
return f1
|
||||||
|
|
||||||
|
|
||||||
class Progress(object):
|
class Progress(object):
|
||||||
def __init__(self, maxval):
|
def __init__(self, maxval):
|
||||||
if maxval == 0:
|
if maxval == 0:
|
||||||
|
@ -66,22 +76,24 @@ class Progress(object):
|
||||||
widgets=[progressbar.Percentage(), ' ',
|
widgets=[progressbar.Percentage(), ' ',
|
||||||
progressbar.Bar(), ' ',
|
progressbar.Bar(), ' ',
|
||||||
progressbar.ETA()])
|
progressbar.ETA()])
|
||||||
if self.bar.term_width == 0:
|
self.bar.term_width = self.bar.term_width or 75
|
||||||
self.bar.term_width = 75
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
self.bar.start()
|
self.bar.start()
|
||||||
self.last_update = 0
|
self.last_update = 0
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_value, traceback):
|
def __exit__(self, exc_type, exc_value, traceback):
|
||||||
if exc_type is None:
|
if exc_type is None:
|
||||||
self.bar.finish()
|
self.bar.finish()
|
||||||
else:
|
else:
|
||||||
printf("\n")
|
printf("\n")
|
||||||
|
|
||||||
def update(self, val):
|
def update(self, val):
|
||||||
self.bar.update(val)
|
self.bar.update(val)
|
||||||
|
|
||||||
class Fsck(object):
|
|
||||||
|
|
||||||
|
class Fsck(object):
|
||||||
def __init__(self, path, fix=False):
|
def __init__(self, path, fix=False):
|
||||||
self.basepath = path
|
self.basepath = path
|
||||||
self.sqlpath = os.path.join(path, "data.sql")
|
self.sqlpath = os.path.join(path, "data.sql")
|
||||||
|
@ -107,7 +119,9 @@ class Fsck(object):
|
||||||
finally:
|
finally:
|
||||||
if self.bulk:
|
if self.bulk:
|
||||||
self.bulk.close()
|
self.bulk.close()
|
||||||
if self.sql:
|
if self.sql: # pragma: no cover
|
||||||
|
# (coverage doesn't handle finally clauses correctly;
|
||||||
|
# both branches here are tested)
|
||||||
self.sql.commit()
|
self.sql.commit()
|
||||||
self.sql.close()
|
self.sql.close()
|
||||||
log("ok\n")
|
log("ok\n")
|
||||||
|
@ -162,7 +176,7 @@ class Fsck(object):
|
||||||
"ORDER BY start_time")
|
"ORDER BY start_time")
|
||||||
for r in result:
|
for r in result:
|
||||||
if r[0] not in self.stream_path:
|
if r[0] not in self.stream_path:
|
||||||
raise FsckError("interval ID %d not in streams", k)
|
raise FsckError("interval ID %d not in streams", r[0])
|
||||||
self.stream_interval[r[0]].append((r[1], r[2], r[3], r[4]))
|
self.stream_interval[r[0]].append((r[1], r[2], r[3], r[4]))
|
||||||
|
|
||||||
log(" loading metadata\n")
|
log(" loading metadata\n")
|
||||||
|
@ -170,16 +184,17 @@ class Fsck(object):
|
||||||
result = cur.execute("SELECT stream_id, key, value FROM metadata")
|
result = cur.execute("SELECT stream_id, key, value FROM metadata")
|
||||||
for r in result:
|
for r in result:
|
||||||
if r[0] not in self.stream_path:
|
if r[0] not in self.stream_path:
|
||||||
raise FsckError("metadata ID %d not in streams", k)
|
raise FsckError("metadata ID %d not in streams", r[0])
|
||||||
if r[1] in self.stream_meta[r[0]]:
|
if r[1] in self.stream_meta[r[0]]:
|
||||||
raise FsckError("duplicate metadata key '%s' for stream %d",
|
raise FsckError(
|
||||||
|
"duplicate metadata key '%s' for stream %d",
|
||||||
r[1], r[0])
|
r[1], r[0])
|
||||||
self.stream_meta[r[0]][r[1]] = r[2]
|
self.stream_meta[r[0]][r[1]] = r[2]
|
||||||
|
|
||||||
### Check streams and basic interval overlap
|
### Check streams and basic interval overlap
|
||||||
|
|
||||||
def check_streams(self):
|
def check_streams(self):
|
||||||
ids = self.stream_path.keys()
|
ids = list(self.stream_path.keys())
|
||||||
log("checking %s streams\n", "{:,d}".format(len(ids)))
|
log("checking %s streams\n", "{:,d}".format(len(ids)))
|
||||||
with Progress(len(ids)) as pbar:
|
with Progress(len(ids)) as pbar:
|
||||||
for i, sid in enumerate(ids):
|
for i, sid in enumerate(ids):
|
||||||
|
@ -187,7 +202,7 @@ class Fsck(object):
|
||||||
path = self.stream_path[sid]
|
path = self.stream_path[sid]
|
||||||
|
|
||||||
# unique path, valid layout
|
# unique path, valid layout
|
||||||
if self.stream_path.values().count(path) != 1:
|
if list(self.stream_path.values()).count(path) != 1:
|
||||||
raise FsckError("duplicated path %s", path)
|
raise FsckError("duplicated path %s", path)
|
||||||
layout = self.stream_layout[sid].split('_')[0]
|
layout = self.stream_layout[sid].split('_')[0]
|
||||||
if layout not in ('int8', 'int16', 'int32', 'int64',
|
if layout not in ('int8', 'int16', 'int32', 'int64',
|
||||||
|
@ -200,6 +215,7 @@ class Fsck(object):
|
||||||
|
|
||||||
# must exist in bulkdata
|
# must exist in bulkdata
|
||||||
bulk = self.bulkpath + path
|
bulk = self.bulkpath + path
|
||||||
|
bulk = bulk.encode('utf-8')
|
||||||
if not os.path.isdir(bulk):
|
if not os.path.isdir(bulk):
|
||||||
raise FsckError("%s: missing bulkdata dir", path)
|
raise FsckError("%s: missing bulkdata dir", path)
|
||||||
if not nilmdb.server.bulkdata.Table.exists(bulk):
|
if not nilmdb.server.bulkdata.Table.exists(bulk):
|
||||||
|
@ -222,39 +238,97 @@ class Fsck(object):
|
||||||
try:
|
try:
|
||||||
posiset += new
|
posiset += new
|
||||||
except IntervalError:
|
except IntervalError:
|
||||||
raise FsckError("%s: overlap in file offsets:\n"
|
self.fix_row_overlap(sid, path, posiset, new)
|
||||||
"set: %s\nnew: %s",
|
|
||||||
path, str(posiset), str(new))
|
|
||||||
|
|
||||||
# check bulkdata
|
try:
|
||||||
|
# Check bulkdata
|
||||||
self.check_bulkdata(sid, path, bulk)
|
self.check_bulkdata(sid, path, bulk)
|
||||||
|
|
||||||
# Check that we can open bulkdata
|
# Check that we can open bulkdata
|
||||||
try:
|
|
||||||
tab = None
|
|
||||||
try:
|
|
||||||
tab = nilmdb.server.bulkdata.Table(bulk)
|
tab = nilmdb.server.bulkdata.Table(bulk)
|
||||||
except Exception as e:
|
except FsckFormatError:
|
||||||
|
# If there are no files except _format, try deleting
|
||||||
|
# the entire stream; this may remove metadata, but
|
||||||
|
# it's probably unimportant.
|
||||||
|
files = list(os.listdir(bulk))
|
||||||
|
if len(files) > 1:
|
||||||
|
raise FsckFormatError(f"{path}: can't load _format, "
|
||||||
|
f"but data is also present")
|
||||||
|
|
||||||
|
# Since the stream was empty, just remove it
|
||||||
|
self.fix_remove_stream(sid, path, bulk,
|
||||||
|
"empty, with corrupted format file")
|
||||||
|
except FsckError as e:
|
||||||
|
raise e
|
||||||
|
except Exception as e: # pragma: no cover
|
||||||
|
# No coverage because this is an unknown/unexpected error
|
||||||
raise FsckError("%s: can't open bulkdata: %s",
|
raise FsckError("%s: can't open bulkdata: %s",
|
||||||
path, str(e))
|
path, str(e))
|
||||||
finally:
|
|
||||||
if tab:
|
|
||||||
tab.close()
|
tab.close()
|
||||||
|
|
||||||
|
def fix_row_overlap(self, sid, path, existing, new):
|
||||||
|
# If the file rows (spos, epos) overlap in the interval table,
|
||||||
|
# and the overlapping ranges look like this:
|
||||||
|
# A --------- C
|
||||||
|
# B -------- D
|
||||||
|
# Then we can try changing the first interval to go from
|
||||||
|
# A to B instead.
|
||||||
|
msg = (f"{path}: overlap in file offsets:\n"
|
||||||
|
f"existing ranges: {existing}\n"
|
||||||
|
f"overlapping interval: {new}")
|
||||||
|
if not self.fix:
|
||||||
|
raise FixableFsckError(msg)
|
||||||
|
err(f"\n{msg}\nSeeing if we can truncate one of them...\n")
|
||||||
|
|
||||||
|
# See if there'e exactly one interval that overlaps the
|
||||||
|
# conflicting one in the right way
|
||||||
|
match = None
|
||||||
|
for intv in self.stream_interval[sid]:
|
||||||
|
(stime, etime, spos, epos) = intv
|
||||||
|
if spos < new.start and epos > new.start:
|
||||||
|
if match:
|
||||||
|
err(f"no, more than one interval matched:\n"
|
||||||
|
f"{intv}\n{match}\n")
|
||||||
|
raise FsckError(f"{path}: unfixable overlap")
|
||||||
|
match = intv
|
||||||
|
if match is None:
|
||||||
|
err("no intervals overlapped in the right way\n")
|
||||||
|
raise FsckError(f"{path}: unfixable overlap")
|
||||||
|
|
||||||
|
# Truncate the file position
|
||||||
|
err(f"truncating {match}\n")
|
||||||
|
with self.sql:
|
||||||
|
cur = self.sql.cursor()
|
||||||
|
cur.execute("UPDATE ranges SET end_pos=? "
|
||||||
|
"WHERE stream_id=? AND start_time=? AND "
|
||||||
|
"end_time=? AND start_pos=? AND end_pos=?",
|
||||||
|
(new.start, sid, *match))
|
||||||
|
if cur.rowcount != 1: # pragma: no cover (shouldn't fail)
|
||||||
|
raise FsckError("failed to fix SQL database")
|
||||||
|
raise RetryFsck
|
||||||
|
|
     ### Check that bulkdata is good enough to be opened

     @retry_if_raised(RetryFsck)
     def check_bulkdata(self, sid, path, bulk):
-        with open(os.path.join(bulk, "_format"), "rb") as f:
-            fmt = pickle.load(f)
+        try:
+            with open(os.path.join(bulk, b"_format"), "rb") as f:
+                fmt = pickle.load(f)
+        except Exception as e:
+            raise FsckFormatError(f"{path}: can't load _format file ({e})")

         if fmt["version"] != 3:
-            raise FsckError("%s: bad or unsupported bulkdata version %d",
-                            path, fmt["version"])
+            raise FsckFormatError("%s: bad or unsupported bulkdata version %d",
+                                  path, fmt["version"])
-        row_per_file = int(fmt["rows_per_file"])
+        rows_per_file = int(fmt["rows_per_file"])
+        if rows_per_file < 1:
+            raise FsckFormatError(f"{path}: bad rows_per_file {rows_per_file}")
         files_per_dir = int(fmt["files_per_dir"])
+        if files_per_dir < 1:
+            raise FsckFormatError(f"{path}: bad files_per_dir {files_per_dir}")
         layout = fmt["layout"]
         if layout != self.stream_layout[sid]:
-            raise FsckError("%s: layout mismatch %s != %s", path,
-                            layout, self.stream_layout[sid])
+            raise FsckFormatError("%s: layout mismatch %s != %s", path,
+                                  layout, self.stream_layout[sid])

         # Every file should have a size that's a multiple of the row size
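For reference, the `_format` file that check_bulkdata parses is just a pickled dict; a short sketch of the round trip (the field values here are examples only, not taken from a real database):

import pickle

fmt = {"rows_per_file": 16384, "files_per_dir": 32768,
       "layout": "float32_8", "version": 3}
blob = pickle.dumps(fmt, 2)           # protocol 2, as in Table.create

loaded = pickle.loads(blob)
assert loaded["version"] == 3
assert int(loaded["rows_per_file"]) >= 1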
@@ -263,16 +337,16 @@ class Fsck(object):
             rkt.close()

         # Find all directories
-        regex = re.compile("^[0-9a-f]{4,}$")
+        regex = re.compile(b"^[0-9a-f]{4,}$")
         subdirs = sorted(filter(regex.search, os.listdir(bulk)),
                          key=lambda x: int(x, 16), reverse=True)
         for subdir in subdirs:
             # Find all files in that dir
             subpath = os.path.join(bulk, subdir)
-            files = filter(regex.search, os.listdir(subpath))
+            files = list(filter(regex.search, os.listdir(subpath)))
             if not files:
                 self.fix_empty_subdir(subpath)
-                raise RetryFsck

             # Verify that their size is a multiple of the row size
             for filename in files:
                 filepath = os.path.join(subpath, filename)

@@ -288,10 +362,11 @@ class Fsck(object):
         # as long as it's only ".removed" files.
         err("\n%s\n", msg)
         for fn in os.listdir(subpath):
-            if not fn.endswith(".removed"):
+            if not fn.endswith(b".removed"):
                 raise FsckError("can't fix automatically: please manually "
-                                "remove the file %s and try again",
-                                os.path.join(subpath, fn))
+                                "remove the file '%s' and try again",
+                                os.path.join(subpath, fn).decode(
+                                    'utf-8', errors='backslashreplace'))
         # Remove the whole thing
         err("Removing empty subpath\n")
         shutil.rmtree(subpath)
@@ -312,19 +387,40 @@ class Fsck(object):
             f.truncate(newsize)
             raise RetryFsck

+    def fix_remove_stream(self, sid, path, bulk, reason):
+        msg = f"stream {path} is corrupted: {reason}"
+        if not self.fix:
+            raise FixableFsckError(msg)
+        # Remove the stream from disk and the database
+        err(f"\n{msg}\n")
+        err(f"Removing stream {path} from disk and database\n")
+        shutil.rmtree(bulk)
+        with self.sql:
+            cur = self.sql.cursor()
+            cur.execute("DELETE FROM streams WHERE id=?",
+                        (sid,))
+            if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
+                raise FsckError("failed to remove stream")
+            cur.execute("DELETE FROM ranges WHERE stream_id=?", (sid,))
+            cur.execute("DELETE FROM metadata WHERE stream_id=?", (sid,))
+        raise RetryFsck
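The `with self.sql:` blocks above and in fix_row_overlap rely on sqlite3's connection context manager, which commits on success and rolls back if the block raises, so a failed fix never leaves a half-deleted stream. A minimal self-contained sketch (table name and values are illustrative):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE streams (id INTEGER PRIMARY KEY)")
conn.execute("INSERT INTO streams VALUES (1)")
conn.commit()

try:
    with conn:  # commits on success, rolls back on exception
        conn.execute("DELETE FROM streams WHERE id=?", (1,))
        raise RuntimeError("simulated failure")
except RuntimeError:
    pass

# The DELETE was rolled back:
assert conn.execute("SELECT COUNT(*) FROM streams").fetchone()[0] == 1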
     ### Check interval endpoints

     def check_intervals(self):
-        total_ints = sum(len(x) for x in self.stream_interval.values())
+        total_ints = sum(len(x) for x in list(self.stream_interval.values()))
         log("checking %s intervals\n", "{:,d}".format(total_ints))
         done = 0
         with Progress(total_ints) as pbar:
             for sid in self.stream_interval:
                 try:
                     bulk = self.bulkpath + self.stream_path[sid]
+                    bulk = bulk.encode('utf-8')
                     tab = nilmdb.server.bulkdata.Table(bulk)

                     def update(x):
                         pbar.update(done + x)

                     ints = self.stream_interval[sid]
                     done += self.check_table_intervals(sid, ints, tab, update)
                 finally:
@@ -333,7 +429,7 @@ class Fsck(object):
     def check_table_intervals(self, sid, ints, tab, update):
         # look in the table to make sure we can pick out the interval's
         # endpoints
-        path = self.stream_path[sid]
+        path = self.stream_path[sid]  # noqa: F841 unused
         tab.file_open.cache_remove_all()
         for (i, intv) in enumerate(ints):
             update(i)

@@ -341,11 +437,11 @@ class Fsck(object):
             if spos == epos and spos >= 0 and spos <= tab.nrows:
                 continue
             try:
-                srow = tab[spos]
-                erow = tab[epos-1]
+                srow = tab[spos]  # noqa: F841 unused
+                erow = tab[epos-1]  # noqa: F841 unused
             except Exception as e:
                 self.fix_bad_interval(sid, intv, tab, str(e))
-                raise RetryFsck
         return len(ints)

     def fix_bad_interval(self, sid, intv, tab, msg):

@@ -374,23 +470,23 @@ class Fsck(object):
                             "end_time=? AND start_pos=? AND end_pos=?",
                             (new_etime, new_epos, sid, stime, etime,
                              spos, epos))
-                if cur.rowcount != 1:
+                if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                     raise FsckError("failed to fix SQL database")
             raise RetryFsck
-        err("actually it can't be truncated; times are bad too")
+        err("actually it can't be truncated; times are bad too\n")

         # Otherwise, the only hope is to delete the interval entirely.
         err("*** Deleting the entire interval from SQL.\n")
-        err("This may leave stale data on disk. To fix that, copy all\n")
-        err("data from this stream to a new stream, then remove all data\n")
-        err("from and destroy %s.\n", path)
+        err("This may leave stale data on disk. To fix that, copy all "
+            "data from this stream to a new stream using nilm-copy, then\n")
+        err("remove all data from and destroy %s.\n", path)
         with self.sql:
             cur = self.sql.cursor()
             cur.execute("DELETE FROM ranges WHERE "
                         "stream_id=? AND start_time=? AND "
                         "end_time=? AND start_pos=? AND end_pos=?",
                         (sid, stime, etime, spos, epos))
-            if cur.rowcount != 1:
+            if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                 raise FsckError("failed to remove interval")
         raise RetryFsck
@@ -398,16 +494,19 @@ class Fsck(object):

     def check_data(self):
         total_rows = sum(sum((y[3] - y[2]) for y in x)
-                         for x in self.stream_interval.values())
+                         for x in list(self.stream_interval.values()))
         log("checking %s rows of data\n", "{:,d}".format(total_rows))
         done = 0
         with Progress(total_rows) as pbar:
             for sid in self.stream_interval:
                 try:
                     bulk = self.bulkpath + self.stream_path[sid]
+                    bulk = bulk.encode('utf-8')
                     tab = nilmdb.server.bulkdata.Table(bulk)

                     def update(x):
                         pbar.update(done + x)

                     ints = self.stream_interval[sid]
                     done += self.check_table_data(sid, ints, tab, update)
                 finally:

@@ -416,7 +515,7 @@ class Fsck(object):
     def check_table_data(self, sid, ints, tab, update):
         # Pull out all of the interval's data and verify that it's
         # monotonic.
-        maxrows = 100000
+        maxrows = getattr(self, 'maxrows_override', 100000)
         path = self.stream_path[sid]
         layout = self.stream_layout[sid]
         dtype = nilmdb.client.numpyclient.layout_to_dtype(layout)
@@ -437,28 +536,75 @@ class Fsck(object):
             # Get raw data, convert to NumPy array
             try:
                 raw = tab.get_data(start, stop, binary=True)
-                data = numpy.fromstring(raw, dtype)
-            except Exception as e:
-                raise FsckError("%s: failed to grab rows %d through %d: %s",
-                                path, start, stop, repr(e))
+                data = numpy.frombuffer(raw, dtype)
+            except Exception as e:  # pragma: no cover
+                # No coverage because it's hard to trigger this -- earlier
+                # checks check the ranges, so this would probably be a real
+                # disk error, malloc failure, etc.
+                raise FsckError(
+                    "%s: failed to grab rows %d through %d: %s",
+                    path, start, stop, repr(e))

+            ts = data['timestamp']
+
+            # Verify that all timestamps are in range.
+            match = (ts < stime) | (ts >= etime)
+            if match.any():
+                row = numpy.argmax(match)
+                if ts[row] != 0:
+                    raise FsckError("%s: data timestamp %d at row %d "
+                                    "outside interval range [%d,%d)",
+                                    path, ts[row], row + start,
+                                    stime, etime)
+
+                # Timestamp is zero and out of the expected range;
+                # assume file ends with zeroed data and just truncate it.
+                self.fix_table_by_truncating(
+                    path, tab, row + start,
+                    "data timestamp is out of range, and zero")

             # Verify that timestamps are monotonic
-            if (numpy.diff(data['timestamp']) <= 0).any():
-                raise FsckError("%s: non-monotonic timestamp(s) in rows "
-                                "%d through %d", path, start, stop)
-            first_ts = data['timestamp'][0]
+            match = numpy.diff(ts) <= 0
+            if match.any():
+                row = numpy.argmax(match)
+                if ts[row+1] != 0:
+                    raise FsckError(
+                        "%s: non-monotonic timestamp (%d -> %d) "
+                        "at row %d", path, ts[row], ts[row+1],
+                        row + start)
+
+                # Timestamp is zero and non-monotonic;
+                # assume file ends with zeroed data and just truncate it.
+                self.fix_table_by_truncating(
+                    path, tab, row + start + 1,
+                    "data timestamp is non-monotonic, and zero")
+
+            first_ts = ts[0]
             if last_ts is not None and first_ts <= last_ts:
                 raise FsckError("%s: first interval timestamp %d is not "
                                 "greater than the previous last interval "
                                 "timestamp %d, at row %d",
                                 path, first_ts, last_ts, start)
-            last_ts = data['timestamp'][-1]
+            last_ts = ts[-1]

-            # These are probably fixable, by removing the offending
-            # intervals. But I'm not going to bother implementing
-            # that yet.
+            # The previous errors are fixable, by removing the
+            # offending intervals, or changing the data
+            # timestamps. But these are probably unlikely errors,
+            # so it's not worth implementing that yet.

             # Done
             done += count
             update(done)
         return done

+    def fix_table_by_truncating(self, path, tab, row, reason):
+        # Simple fix for bad data: truncate the table at the given row.
+        # On retry, fix_bad_interval will correct the database and timestamps
+        # to account for this truncation.
+        msg = f"{path}: bad data in table, starting at row {row}: {reason}"
+        if not self.fix:
+            raise FixableFsckError(msg)
+        err(f"\n{msg}\nWill try truncating table\n")
+        (subdir, fname, offs, count) = tab._offset_from_row(row)
+        tab._remove_or_truncate_file(subdir, fname, offs)
+        raise RetryFsck
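The numpy.fromstring-to-frombuffer switch matters beyond the Python 3 port: fromstring is deprecated for binary input, and frombuffer returns a zero-copy, read-only view. A small self-contained sketch of the timestamp checks above (the layout and values are made up):

import numpy

# A structured dtype like the ones layout_to_dtype() produces:
# an int64 timestamp followed by data columns.
dtype = numpy.dtype([('timestamp', '<i8'), ('c1', '<f4')])
raw = numpy.array([(10, 0.), (20, 0.), (15, 0.)], dtype).tobytes()

data = numpy.frombuffer(raw, dtype)   # zero-copy, read-only view
ts = data['timestamp']

match = numpy.diff(ts) <= 0
if match.any():
    row = numpy.argmax(match)         # first offending position
    print("non-monotonic at row", row + 1, ts[row], "->", ts[row + 1])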
@@ -1,16 +1,16 @@
-#!/usr/bin/python
+#!/usr/bin/env python3

 import nilmdb.fsck
 import argparse
-import os
-import sys


 def main():
     """Main entry point for the 'nilmdb-fsck' command line script"""

     parser = argparse.ArgumentParser(
         description='Check database consistency',
-        formatter_class = argparse.ArgumentDefaultsHelpFormatter,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument("-v", "--version", action="version",
                         version=nilmdb.__version__)
     parser.add_argument("-f", "--fix", action="store_true",
                         default=False, help='Fix errors when possible '

@@ -22,5 +22,6 @@ def main():

     nilmdb.fsck.Fsck(args.database, args.fix).check(skip_data=args.no_data)


 if __name__ == "__main__":
     main()
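Python 3's argparse dropped the old version= constructor keyword, which is why both scripts grow a -v/--version argument with action="version"; a minimal sketch:

import argparse

parser = argparse.ArgumentParser(description='Check database consistency')
# In Python 3, version reporting is an argument action, not a
# constructor keyword:
parser.add_argument("-v", "--version", action="version", version="1.0")

args = parser.parse_args([])   # "prog -v" would print "1.0" and exit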
@@ -1,16 +1,23 @@
-#!/usr/bin/python
+#!/usr/bin/env python3

+import os
+import sys
+import socket
+import argparse
+
+import cherrypy
+
 import nilmdb.server
-import argparse
-import os
-import socket


 def main():
     """Main entry point for the 'nilmdb-server' command line script"""

     parser = argparse.ArgumentParser(
         description='Run the NilmDB server',
-        formatter_class = argparse.ArgumentDefaultsHelpFormatter,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    parser.add_argument("-v", "--version", action="version",
                         version=nilmdb.__version__)

     group = parser.add_argument_group("Standard options")

@@ -39,47 +46,54 @@ def main():
     db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(args.database)

     # Configure the server
-    if args.quiet:
-        embedded = True
-    else:
-        embedded = False
+    if not args.quiet:
+        cherrypy._cpconfig.environments['embedded']['log.screen'] = True
+
     server = nilmdb.server.Server(db,
                                   host=args.address,
                                   port=args.port,
-                                  embedded = embedded,
                                   force_traceback=args.traceback)

     # Print info
     if not args.quiet:
-        print "Version: %s" % nilmdb.__version__
-        print "Database: %s" % (os.path.realpath(args.database))
+        print("Version: %s" % nilmdb.__version__)
+        print("Database: %s" % (os.path.realpath(args.database)))
         if args.address == '0.0.0.0' or args.address == '::':
             host = socket.getfqdn()
         else:
             host = args.address
-        print "Server URL: http://%s:%d/" % ( host, args.port)
-        print "----"
+        print("Server URL: http://%s:%d/" % (host, args.port))
+        print("----")

     # Run it
+    try:
         if args.yappi:
-            print "Running in yappi"
+            print("Running in yappi")
             try:
                 import yappi
                 yappi.start()
                 server.start(blocking=True)
             finally:
                 yappi.stop()
-                yappi.print_stats(sort_type = yappi.SORTTYPE_TTOT, limit = 50)
-                from IPython import embed
-                embed(header = "Use the yappi object to explore further, "
-                      "quit to exit")
+                stats = yappi.get_func_stats()
+                stats.sort("ttot")
+                stats.print_all()
+                try:
+                    from IPython import embed
+                    embed(header="Use the `yappi` or `stats` object to "
+                          "explore further, `quit` to exit")
+                except ModuleNotFoundError:
+                    print("\nInstall ipython to explore further")
         else:
             server.start(blocking=True)
+    except nilmdb.server.serverutil.CherryPyExit:
+        print("Exiting due to CherryPy error", file=sys.stderr)
+        raise
+    finally:
-    # Clean up
         if not args.quiet:
-            print "Closing database"
+            print("Closing database")
         db.close()


 if __name__ == "__main__":
     main()
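yappi's old print_stats(sort_type=...) API was removed; current releases expose function statistics through get_func_stats(), as the new code does. A short standalone sketch (the profiled function is arbitrary):

import yappi

def work():
    return sum(i * i for i in range(100000))

yappi.start()
work()
yappi.stop()

stats = yappi.get_func_stats()
stats.sort("ttot")      # sort by total time, like the old SORTTYPE_TTOT
stats.print_all()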
@@ -1,10 +1,12 @@
-#!/usr/bin/python
+#!/usr/bin/env python3

 import nilmdb.cmdline


 def main():
     """Main entry point for the 'nilmtool' command line script"""
     nilmdb.cmdline.Cmdline().run()


 if __name__ == "__main__":
     main()
@@ -1,20 +1,8 @@
 """nilmdb.server"""

-from __future__ import absolute_import
-
-# Try to set up pyximport to automatically rebuild Cython modules. If
-# this doesn't work, it's OK, as long as the modules were built externally.
-# (e.g. python setup.py build_ext --inplace)
-try: # pragma: no cover
-    import Cython
-    import distutils.version
-    if (distutils.version.LooseVersion(Cython.__version__) <
-        distutils.version.LooseVersion("0.17")): # pragma: no cover
-        raise ImportError("Cython version too old")
-    import pyximport
-    pyximport.install(inplace=True, build_in_temp=False)
-except (ImportError, TypeError): # pragma: no cover
-    pass
+# Set up pyximport to automatically rebuild Cython modules if needed.
+import pyximport
+pyximport.install(inplace=True, build_in_temp=False)

 from nilmdb.server.nilmdb import NilmDB
 from nilmdb.server.server import Server, wsgi_application
@@ -1,19 +1,15 @@
 # Fixed record size bulk data storage

-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the parent nilmdb module instead.
-from __future__ import absolute_import
-from __future__ import division
-from nilmdb.utils.printf import *
-from nilmdb.utils.time import timestamp_to_string as timestamp_to_string
-import nilmdb.utils
-
 import os
-import cPickle as pickle
 import re
 import sys
+import pickle
 import tempfile

+from nilmdb.utils.printf import sprintf
+from nilmdb.utils.time import timestamp_to_string
+import nilmdb.utils
+
 import nilmdb.utils.lock
 from . import rocket
@@ -22,28 +18,32 @@ from . import rocket
 table_cache_size = 32
 fd_cache_size = 8


 @nilmdb.utils.must_close(wrap_verify=False)
-class BulkData(object):
+class BulkData():
     def __init__(self, basepath, **kwargs):
-        self.basepath = basepath
-        self.root = os.path.join(self.basepath, "data")
-        self.lock = self.root + ".lock"
+        if isinstance(basepath, str):
+            self.basepath = self._encode_filename(basepath)
+        else:
+            self.basepath = basepath
+        self.root = os.path.join(self.basepath, b"data")
+        self.lock = self.root + b".lock"
         self.lockfile = None

         # Tuneables
-        if "file_size" in kwargs:
+        if "file_size" in kwargs and kwargs["file_size"] is not None:
             self.file_size = kwargs["file_size"]
         else:
             # Default to approximately 128 MiB per file
             self.file_size = 128 * 1024 * 1024

-        if "files_per_dir" in kwargs:
+        if "files_per_dir" in kwargs and kwargs["files_per_dir"] is not None:
             self.files_per_dir = kwargs["files_per_dir"]
         else:
             # 32768 files per dir should work even on FAT32
             self.files_per_dir = 32768

-        if "initial_nrows" in kwargs:
+        if "initial_nrows" in kwargs and kwargs["initial_nrows"] is not None:
             self.initial_nrows = kwargs["initial_nrows"]
         else:
             # First row is 0
@@ -56,7 +56,8 @@ class BulkData(object):
         # Create the lock
         self.lockfile = open(self.lock, "w")
         if not nilmdb.utils.lock.exclusive_lock(self.lockfile):
-            raise IOError('database at "' + self.basepath +
+            raise IOError('database at "' +
+                          self._decode_filename(self.basepath) +
                           '" is already locked by another process')

     def close(self):
@@ -66,21 +67,21 @@ class BulkData(object):
             self.lockfile.close()
             try:
                 os.unlink(self.lock)
-            except OSError: # pragma: no cover
+            except OSError:
                 pass
             self.lockfile = None

     def _encode_filename(self, path):
-        # Encode all paths to UTF-8, regardless of sys.getfilesystemencoding(),
-        # because we want to be able to represent all code points and the user
-        # will never be directly exposed to filenames. We can then do path
-        # manipulations on the UTF-8 directly.
-        if isinstance(path, unicode):
-            return path.encode('utf-8')
-        return path
+        # Translate unicode strings to raw bytes, if needed. We
+        # always manipulate paths internally as bytes.
+        return path.encode('utf-8')
+
+    def _decode_filename(self, path):
+        # Translate raw bytes to unicode strings, escaping if needed
+        return path.decode('utf-8', errors='backslashreplace')
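Handling every on-disk path as bytes sidesteps filesystem-encoding surprises, and backslashreplace keeps undecodable names printable instead of raising. A standalone sketch of the round trip these two helpers perform:

def encode_filename(path):
    # str -> bytes, always UTF-8, independent of the locale
    return path.encode('utf-8')

def decode_filename(path):
    # bytes -> str for display; never raises on weird bytes
    return path.decode('utf-8', errors='backslashreplace')

p = encode_filename("/stream/voltage")
print(decode_filename(p))            # /stream/voltage
print(decode_filename(b"/bad\xff"))  # /bad\xff  (escaped, not an error)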
     def _create_check_ospath(self, ospath):
-        if ospath[-1] == '/':
+        if ospath[-1:] == b'/':
             raise ValueError("invalid path; should not end with a /")
         if Table.exists(ospath):
             raise ValueError("stream already exists at this path")

@@ -88,7 +89,7 @@ class BulkData(object):
         # Look for any files in subdirectories. Fully empty subdirectories
         # are OK; they might be there during a rename
         for (root, dirs, files) in os.walk(ospath):
-            if len(files):
+            if files:
                 raise ValueError(
                     "non-empty subdirs of this path already exist")
@@ -97,13 +98,13 @@ class BulkData(object):
         don't exist. Returns a list of elements that got created."""
         path = self._encode_filename(unicodepath)

-        if path[0] != '/':
+        if path[0:1] != b'/':
             raise ValueError("paths must start with / ")
-        [ group, node ] = path.rsplit("/", 1)
-        if group == '':
+        [group, node] = path.rsplit(b"/", 1)
+        if group == b'':
             raise ValueError("invalid path; path must contain at least one "
                              "folder")
-        if node == '':
+        if node == b'':
             raise ValueError("invalid path; should not end with a /")
         if not Table.valid_path(path):
             raise ValueError("path name is invalid or contains reserved words")
@@ -114,7 +115,7 @@ class BulkData(object):
         # os.path.join)

         # Make directories leading up to this one
-        elements = path.lstrip('/').split('/')
+        elements = path.lstrip(b'/').split(b'/')
         made_dirs = []
         try:
             # Make parent elements

@@ -125,15 +126,11 @@ class BulkData(object):
                 if not os.path.isdir(ospath):
                     os.mkdir(ospath)
                     made_dirs.append(ospath)
-        except Exception as e:
-            # Try to remove paths that we created; ignore errors
-            exc_info = sys.exc_info()
-            for ospath in reversed(made_dirs): # pragma: no cover (hard to hit)
-                try:
-                    os.rmdir(ospath)
-                except OSError:
-                    pass
-            raise exc_info[1], None, exc_info[2]
+        except Exception:
+            # Remove paths that we created
+            for ospath in reversed(made_dirs):
+                os.rmdir(ospath)
+            raise

         return elements
@@ -168,7 +165,7 @@ class BulkData(object):
                     os.rmdir(ospath)
                 except OSError:
                     pass
-            raise exc_info[1], None, exc_info[2]
+            raise exc_info[1].with_traceback(exc_info[2])

         # Success
         return

@@ -176,8 +173,8 @@ class BulkData(object):
     def _remove_leaves(self, unicodepath):
         """Remove empty directories starting at the leaves of unicodepath"""
         path = self._encode_filename(unicodepath)
-        elements = path.lstrip('/').split('/')
-        for i in reversed(range(len(elements))):
+        elements = path.lstrip(b'/').split(b'/')
+        for i in reversed(list(range(len(elements)))):
             ospath = os.path.join(self.root, *elements[0:i+1])
             try:
                 os.rmdir(ospath)
|
||||||
newpath = self._encode_filename(newunicodepath)
|
newpath = self._encode_filename(newunicodepath)
|
||||||
|
|
||||||
# Get OS paths
|
# Get OS paths
|
||||||
oldelements = oldpath.lstrip('/').split('/')
|
oldelements = oldpath.lstrip(b'/').split(b'/')
|
||||||
oldospath = os.path.join(self.root, *oldelements)
|
oldospath = os.path.join(self.root, *oldelements)
|
||||||
newelements = newpath.lstrip('/').split('/')
|
newelements = newpath.lstrip(b'/').split(b'/')
|
||||||
newospath = os.path.join(self.root, *newelements)
|
newospath = os.path.join(self.root, *newelements)
|
||||||
|
|
||||||
# Basic checks
|
# Basic checks
|
||||||
|
@ -204,8 +201,8 @@ class BulkData(object):
|
||||||
self.getnode.cache_remove(self, oldunicodepath)
|
self.getnode.cache_remove(self, oldunicodepath)
|
||||||
|
|
||||||
# Move the table to a temporary location
|
# Move the table to a temporary location
|
||||||
tmpdir = tempfile.mkdtemp(prefix = "rename-", dir = self.root)
|
tmpdir = tempfile.mkdtemp(prefix=b"rename-", dir=self.root)
|
||||||
tmppath = os.path.join(tmpdir, "table")
|
tmppath = os.path.join(tmpdir, b"table")
|
||||||
os.rename(oldospath, tmppath)
|
os.rename(oldospath, tmppath)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -233,7 +230,7 @@ class BulkData(object):
|
||||||
path = self._encode_filename(unicodepath)
|
path = self._encode_filename(unicodepath)
|
||||||
|
|
||||||
# Get OS path
|
# Get OS path
|
||||||
elements = path.lstrip('/').split('/')
|
elements = path.lstrip(b'/').split(b'/')
|
||||||
ospath = os.path.join(self.root, *elements)
|
ospath = os.path.join(self.root, *elements)
|
||||||
|
|
||||||
# Remove Table object from cache
|
# Remove Table object from cache
|
||||||
|
@@ -258,12 +255,13 @@ class BulkData(object):
         """Return a Table object corresponding to the given database
         path, which must exist."""
         path = self._encode_filename(unicodepath)
-        elements = path.lstrip('/').split('/')
+        elements = path.lstrip(b'/').split(b'/')
         ospath = os.path.join(self.root, *elements)
         return Table(ospath, self.initial_nrows)


 @nilmdb.utils.must_close(wrap_verify=False)
-class Table(object):
+class Table():
     """Tools to help access a single table (data at a specific OS path)."""
     # See design.md for design details
@@ -271,12 +269,12 @@ class Table(object):
     @classmethod
     def valid_path(cls, root):
         """Return True if a root path is a valid name"""
-        return "_format" not in root.split("/")
+        return b"_format" not in root.split(b"/")

     @classmethod
     def exists(cls, root):
         """Return True if a table appears to exist at this OS path"""
-        return os.path.isfile(os.path.join(root, "_format"))
+        return os.path.isfile(os.path.join(root, b"_format"))

     @classmethod
     def create(cls, root, layout, file_size, files_per_dir):
@@ -289,12 +287,14 @@ class Table(object):
         rows_per_file = max(file_size // rkt.binary_size, 1)
         rkt.close()

-        fmt = { "rows_per_file": rows_per_file,
-                "files_per_dir": files_per_dir,
-                "layout": layout,
-                "version": 3 }
-        with open(os.path.join(root, "_format"), "wb") as f:
-            pickle.dump(fmt, f, 2)
+        fmt = {
+            "rows_per_file": rows_per_file,
+            "files_per_dir": files_per_dir,
+            "layout": layout,
+            "version": 3
+        }
+        nilmdb.utils.atomic.replace_file(
+            os.path.join(root, b"_format"), pickle.dumps(fmt, 2))
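Writing the pickled bytes through an atomic replace means a crash can never leave a half-written _format file behind. The nilmdb.utils.atomic internals are not shown in this diff, so the following is only a generic sketch of the usual pattern (temp file in the same directory, fsync, then rename over the target; assumes filename includes a directory component):

import os
import tempfile

def replace_file(filename, data):
    """Atomically replace `filename` with a file containing `data`
    (bytes).  Standard pattern, not necessarily NilmDB's exact code."""
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(filename))
    try:
        os.write(fd, data)
        os.fsync(fd)
    finally:
        os.close(fd)
    # rename() within one filesystem is atomic on POSIX
    os.rename(tmp, filename)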
     # Normal methods
     def __init__(self, root, initial_nrows=0):

@@ -303,10 +303,10 @@ class Table(object):
         self.initial_nrows = initial_nrows

         # Load the format
-        with open(os.path.join(self.root, "_format"), "rb") as f:
+        with open(os.path.join(self.root, b"_format"), "rb") as f:
             fmt = pickle.load(f)

-        if fmt["version"] != 3: # pragma: no cover
+        if fmt["version"] != 3:
             # Old versions used floating point timestamps, which aren't
             # valid anymore.
             raise NotImplementedError("old version " + str(fmt["version"]) +

@@ -336,7 +336,7 @@ class Table(object):
         # greater than the row number of any piece of data that
         # currently exists, not necessarily all data that _ever_
         # existed.
-        regex = re.compile("^[0-9a-f]{4,}$")
+        regex = re.compile(b"^[0-9a-f]{4,}$")

         # Find the last directory. We sort and loop through all of them,
         # starting with the numerically greatest, because the dirs could be

@@ -348,8 +348,8 @@ class Table(object):
         for subdir in subdirs:
             # Now find the last file in that dir
             path = os.path.join(self.root, subdir)
-            files = filter(regex.search, os.listdir(path))
-            if not files: # pragma: no cover (shouldn't occur)
+            files = list(filter(regex.search, os.listdir(path)))
+            if not files:
                 # Empty dir: try the next one
                 continue
|
||||||
filenum = row // self.rows_per_file
|
filenum = row // self.rows_per_file
|
||||||
# It's OK if these format specifiers are too short; the filenames
|
# It's OK if these format specifiers are too short; the filenames
|
||||||
# will just get longer but will still sort correctly.
|
# will just get longer but will still sort correctly.
|
||||||
dirname = sprintf("%04x", filenum // self.files_per_dir)
|
dirname = sprintf(b"%04x", filenum // self.files_per_dir)
|
||||||
filename = sprintf("%04x", filenum % self.files_per_dir)
|
filename = sprintf(b"%04x", filenum % self.files_per_dir)
|
||||||
offset = (row % self.rows_per_file) * self.row_size
|
offset = (row % self.rows_per_file) * self.row_size
|
||||||
count = self.rows_per_file - (row % self.rows_per_file)
|
count = self.rows_per_file - (row % self.rows_per_file)
|
||||||
return (dirname, filename, offset, count)
|
return (dirname, filename, offset, count)
|
||||||
|
@ -389,7 +389,7 @@ class Table(object):
|
||||||
def _row_from_offset(self, subdir, filename, offset):
|
def _row_from_offset(self, subdir, filename, offset):
|
||||||
"""Return the row number that corresponds to the given
|
"""Return the row number that corresponds to the given
|
||||||
'subdir/filename' and byte-offset within that file."""
|
'subdir/filename' and byte-offset within that file."""
|
||||||
if (offset % self.row_size) != 0: # pragma: no cover
|
if (offset % self.row_size) != 0:
|
||||||
# this shouldn't occur, unless there is some corruption somewhere
|
# this shouldn't occur, unless there is some corruption somewhere
|
||||||
raise ValueError("file offset is not a multiple of data size")
|
raise ValueError("file offset is not a multiple of data size")
|
||||||
filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16)
|
filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16)
|
||||||
|
@@ -436,6 +436,8 @@ class Table(object):
         are non-monotonic, or don't fall between 'start' and 'end',
         a ValueError is raised.

+        Note that data is always of 'bytes' type.
+
         If 'binary' is True, the data should be in raw binary format
         instead: little-endian, matching the current table's layout,
         including the int64 timestamp.

@@ -452,7 +454,7 @@ class Table(object):
             while data_offset < len(data):
                 # See how many rows we can fit into the current file,
                 # and open it
-                (subdir, fname, offset, count) = self._offset_from_row(tot_rows)
+                (subdir, fname, offs, count) = self._offset_from_row(tot_rows)
                 f = self.file_open(subdir, fname)

                 # Ask the rocket object to parse and append up to "count"

@@ -476,9 +478,9 @@ class Table(object):
                     if binary:
                         raise IndexError
                     bad = data.splitlines()[linenum-1]
-                    bad += '\n' + ' ' * (colnum - 1) + '^'
+                    bad += b'\n' + b' ' * (colnum - 1) + b'^'
                 except IndexError:
-                    bad = ""
+                    bad = b""
                 if errtype == rocket.ERR_NON_MONOTONIC:
                     err = "timestamp is not monotonically increasing"
                 elif errtype == rocket.ERR_OUT_OF_INTERVAL:

@@ -492,16 +494,17 @@ class Table(object):
                             timestamp_to_string(end))
                 else:
                     err = str(obj)
+                bad_str = bad.decode('utf-8', errors='backslashreplace')
                 raise ValueError("error parsing input data: " +
-                                 where + err + "\n" + bad)
+                                 where + err + "\n" + bad_str)
             tot_rows += added_rows
         except Exception:
             # Some failure, so try to roll things back by truncating or
             # deleting files that we may have appended data to.
             cleanpos = self.nrows
             while cleanpos <= tot_rows:
-                (subdir, fname, offset, count) = self._offset_from_row(cleanpos)
-                self._remove_or_truncate_file(subdir, fname, offset)
+                (subdir, fname, offs, count) = self._offset_from_row(cleanpos)
+                self._remove_or_truncate_file(subdir, fname, offs)
                 cleanpos += count
             # Re-raise original exception
             raise
@@ -512,11 +515,8 @@ class Table(object):
     def get_data(self, start, stop, binary=False):
         """Extract data corresponding to Python range [n:m],
         and returns a formatted string"""
-        if (start is None or
-            stop is None or
-            start > stop or
-            start < 0 or
-            stop > self.nrows):
+        if (start is None or stop is None or
+                start > stop or start < 0 or stop > self.nrows):
             raise IndexError("Index out of range")

         ret = []

@@ -556,7 +556,7 @@ class Table(object):
         # file. Only when the list covers the entire extent of the
         # file will that file be removed.
         datafile = os.path.join(self.root, subdir, filename)
-        cachefile = datafile + ".removed"
+        cachefile = datafile + b".removed"
         try:
             with open(cachefile, "rb") as f:
                 ranges = pickle.load(f)

@@ -583,7 +583,8 @@ class Table(object):
             # Not connected; append previous and start again
             merged.append(prev)
             prev = new
-        if prev is not None:
-            merged.append(prev)
+        # Last range we were looking at goes into the file. We know
+        # there was at least one (the one we just removed).
+        merged.append(prev)

         # If the range covered the whole file, we can delete it now.
@@ -1,12 +1,15 @@
 """Exceptions"""


 class NilmDBError(Exception):
     """Base exception for NilmDB errors"""
-    def __init__(self, message = "Unspecified error"):
-        Exception.__init__(self, message)
+    def __init__(self, msg="Unspecified error"):
+        super().__init__(msg)


 class StreamError(NilmDBError):
     pass


 class OverlapError(NilmDBError):
     pass
@@ -1,3 +1,5 @@
+# cython: language_level=2
+
 """Interval, IntervalSet

 The Interval implemented here is just like

@@ -58,9 +60,19 @@ cdef class Interval:
         return ("[" + timestamp_to_string(self.start) +
                 " -> " + timestamp_to_string(self.end) + ")")

-    def __cmp__(self, Interval other):
-        """Compare two intervals. If non-equal, order by start then end"""
-        return cmp(self.start, other.start) or cmp(self.end, other.end)
+    # Compare two intervals. If non-equal, order by start then end
+    def __lt__(self, Interval other):
+        return (self.start, self.end) < (other.start, other.end)
+    def __gt__(self, Interval other):
+        return (self.start, self.end) > (other.start, other.end)
+    def __le__(self, Interval other):
+        return (self.start, self.end) <= (other.start, other.end)
+    def __ge__(self, Interval other):
+        return (self.start, self.end) >= (other.start, other.end)
+    def __eq__(self, Interval other):
+        return (self.start, self.end) == (other.start, other.end)
+    def __ne__(self, Interval other):
+        return (self.start, self.end) != (other.start, other.end)

     cpdef intersects(self, Interval other):
         """Return True if two Interval objects intersect"""
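Python 3 removed __cmp__ and the cmp() builtin, so ordering has to be spelled out as rich comparisons, as above. In plain Python (outside a Cython cdef class), functools.total_ordering can derive most of them from __eq__ and __lt__; a sketch:

from functools import total_ordering

@total_ordering
class Interval:
    def __init__(self, start, end):
        self.start, self.end = start, end

    def __eq__(self, other):
        return (self.start, self.end) == (other.start, other.end)

    def __lt__(self, other):
        # Order by start, then end -- tuple comparison does both
        return (self.start, self.end) < (other.start, other.end)

assert Interval(0, 5) < Interval(0, 9) <= Interval(1, 2)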
@@ -7,12 +7,13 @@ Object that represents a NILM database file.
 Manages both the SQL database and the table storage backend.
 """

-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the parent nilmdb module instead.
-from __future__ import absolute_import
+import os
+import errno
+import sqlite3

 import nilmdb.utils
-from nilmdb.utils.printf import *
-from nilmdb.utils.time import timestamp_to_string
+from nilmdb.utils.printf import printf
+from nilmdb.utils.time import timestamp_to_bytes

 from nilmdb.utils.interval import IntervalError
 from nilmdb.server.interval import Interval, DBInterval, IntervalSet

@@ -20,10 +21,6 @@ from nilmdb.server.interval import Interval, DBInterval, IntervalSet
 from nilmdb.server import bulkdata
 from nilmdb.server.errors import NilmDBError, StreamError, OverlapError

-import sqlite3
-import os
-import errno
-
 # Note about performance and transactions:
 #
 # Committing a transaction in the default sync mode (PRAGMA synchronous=FULL)
@@ -78,8 +75,9 @@ _sql_schema_updates = {
     3: {"next": None},
 }


 @nilmdb.utils.must_close()
-class NilmDB(object):
+class NilmDB():
     verbose = 0

     def __init__(self, basepath,

@@ -111,9 +109,7 @@ class NilmDB(object):
         try:
             os.makedirs(self.basepath)
         except OSError as e:
-            if e.errno != errno.EEXIST: # pragma: no cover
-                # (no coverage, because it's hard to trigger this case
-                # if tests are run as root)
+            if e.errno != errno.EEXIST:
                 raise IOError("can't create tree " + self.basepath)

         # Our data goes inside it

@@ -124,7 +120,7 @@ class NilmDB(object):
         self.con = sqlite3.connect(sqlfilename, check_same_thread=True)
         try:
             self._sql_schema_update()
-        except Exception: # pragma: no cover
+        except Exception:
             self.data.close()
             raise

@@ -149,6 +145,7 @@ class NilmDB(object):
         if self.con:
             self.con.commit()
             self.con.close()
+            self.con = None
         self.data.close()

     def _sql_schema_update(self):
@@ -157,18 +154,18 @@ class NilmDB(object):
         oldversion = version

         while True:
-            if version not in _sql_schema_updates: # pragma: no cover
+            if version not in _sql_schema_updates:
                 raise Exception(self.basepath + ": unknown database version "
                                 + str(version))
             update = _sql_schema_updates[version]
-            if "error" in update: # pragma: no cover
+            if "error" in update:
                 raise Exception(self.basepath + ": can't use database version "
                                 + str(version) + ": " + update["error"])
             if update["next"] is None:
                 break
             cur.executescript(update["sql"])
             version = update["next"]
-            if self.verbose: # pragma: no cover
+            if self.verbose:
                 printf("Database schema updated to %d\n", version)

         if version != oldversion:
@@ -199,7 +196,7 @@ class NilmDB(object):
                 iset += DBInterval(start_time, end_time,
                                    start_time, end_time,
                                    start_pos, end_pos)
-        except IntervalError: # pragma: no cover
+        except IntervalError:
             raise NilmDBError("unexpected overlap in ranges table!")

         return iset

@@ -226,10 +223,6 @@ class NilmDB(object):
         # Load this stream's intervals
         iset = self._get_intervals(stream_id)

-        # Check for overlap
-        if iset.intersects(interval): # pragma: no cover (gets caught earlier)
-            raise NilmDBError("new interval overlaps existing data")
-
         # Check for adjacency. If there's a stream in the database
         # that ends exactly when this one starts, and the database
         # rows match up, we can make one interval that covers the

@@ -272,10 +265,6 @@ class NilmDB(object):
         original: original DBInterval; must be already present in DB
         to_remove: DBInterval to remove; must be subset of 'original'
         """
-        # Just return if we have nothing to remove
-        if remove.start == remove.end: # pragma: no cover
-            return
-
         # Load this stream's intervals
         iset = self._get_intervals(stream_id)

@@ -290,7 +279,8 @@ class NilmDB(object):
         # the removed piece was in the middle.
         def add(iset, start, end, start_pos, end_pos):
             iset += DBInterval(start, end, start, end, start_pos, end_pos)
-            self._sql_interval_insert(stream_id, start, end, start_pos, end_pos)
+            self._sql_interval_insert(stream_id, start, end,
+                                      start_pos, end_pos)

         if original.start != remove.start:
             # Interval before the removed region
@@ -420,8 +410,8 @@ class NilmDB(object):

     def stream_set_metadata(self, path, data):
         """Set stream metadata from a dictionary, e.g.
-        { description = 'Downstairs lighting',
-          v_scaling = 123.45 }
+        { description: 'Downstairs lighting',
+          v_scaling: 123.45 }
         This replaces all existing metadata.
         """
         stream_id = self._stream_id(path)

@@ -469,7 +459,7 @@ class NilmDB(object):

         # Verify that no intervals are present, and clear the cache
         iset = self._get_intervals(stream_id)
-        if len(iset):
+        if iset:
             raise NilmDBError("all intervals must be removed before "
                               "destroying a stream")
         self._get_intervals.cache_remove(self, stream_id)
@@ -519,7 +509,7 @@ class NilmDB(object):
         # Like bisect.bisect_left, but doesn't choke on large indices on
         # 32-bit systems, like bisect's fast C implementation does.
         while lo < hi:
-            mid = (lo + hi) / 2
+            mid = (lo + hi) // 2
             if a[mid] < x:
                 lo = mid + 1
             else:
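This one-character change matters: under Python 3, / always produces a float, so a midpoint used as a list index must use floor division. A tiny demonstration:

lo, hi = 0, 7
print((lo + hi) / 2)    # 3.5 -- float, unusable as an index in Python 3
print((lo + hi) // 2)   # 3   -- int, what a bisection needs

a = list(range(8))
mid = (lo + hi) // 2
assert a[mid] == 3      # a[3.5] would raise TypeError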
@@ -617,8 +607,8 @@ class NilmDB(object):

             # Add markup
             if markup:
-                result.append("# interval-start " +
-                              timestamp_to_string(interval.start) + "\n")
+                result.append(b"# interval-start " +
+                              timestamp_to_bytes(interval.start) + b"\n")

             # Gather these results up
             result.append(table.get_data(row_start, row_end, binary))

@@ -629,16 +619,17 @@ class NilmDB(object):
             # Add markup, and exit if restart is set.
             if restart is not None:
                 if markup:
-                    result.append("# interval-end " +
-                                  timestamp_to_string(restart) + "\n")
+                    result.append(b"# interval-end " +
+                                  timestamp_to_bytes(restart) + b"\n")
                 break
             if markup:
-                result.append("# interval-end " +
-                              timestamp_to_string(interval.end) + "\n")
+                result.append(b"# interval-end " +
+                              timestamp_to_bytes(interval.end) + b"\n")

         if count:
             return matched
-        return ("".join(result), restart)
+        full_result = b"".join(result)
+        return (full_result, restart)

     def stream_remove(self, path, start=None, end=None):
         """
@@ -1,3 +1,5 @@
+# cython: language_level=2
+
 cdef class RBNode:
     cdef public object obj
     cdef public double start, end

@@ -1,5 +1,6 @@
 # cython: profile=False
 # cython: cdivision=True
+# cython: language_level=2

 """
 Jim Paris <jim@jtan.com>
@@ -138,7 +138,7 @@ static void Rocket_dealloc(Rocket *self)
                fclose(self->file);
                self->file = NULL;
        }
-       self->ob_type->tp_free((PyObject *)self);
+       Py_TYPE(self)->tp_free((PyObject *)self);
 }
 
 static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
@@ -160,13 +160,19 @@ static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 static int Rocket_init(Rocket *self, PyObject *args, PyObject *kwds)
 {
        const char *layout, *path;
+       int pathlen;
        static char *kwlist[] = { "layout", "file", NULL };
-       if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz", kwlist,
-                                        &layout, &path))
+       if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz#", kwlist,
+                                        &layout, &path, &pathlen))
                return -1;
        if (!layout)
                return -1;
        if (path) {
+               if (strlen(path) != (size_t)pathlen) {
+                       PyErr_SetString(PyExc_ValueError, "path must not "
+                                       "contain NUL characters");
+                       return -1;
+               }
                if ((self->file = fopen(path, "a+b")) == NULL) {
                        PyErr_SetFromErrno(PyExc_OSError);
                        return -1;
@@ -239,7 +245,7 @@ static PyObject *Rocket_get_file_size(Rocket *self)
                        return NULL;
                }
        }
-       return PyInt_FromLong(self->file_size);
+       return PyLong_FromLong(self->file_size);
 }
 
 /****
@@ -273,11 +279,9 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
        union64_t t64;
        int i;
 
-       /* It would be nice to use 't#' instead of 's' for data,
-          but we need the null termination for strto*.  If we had
-          strnto* that took a length, we could use t# and not require
-          a copy. */
-       if (!PyArg_ParseTuple(args, "isiiLLL:append_string", &count,
+       /* Input data is bytes.  Using 'y#' instead of 'y' might be
+          preferable, but strto* requires the null terminator. */
+       if (!PyArg_ParseTuple(args, "iyiiLLL:append_string", &count,
                              &data, &offset, &linenum,
                              &ll1, &ll2, &ll3))
                return NULL;
@@ -437,7 +441,7 @@ static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
        timestamp_t end;
        timestamp_t last_timestamp;
 
-       if (!PyArg_ParseTuple(args, "it#iiLLL:append_binary",
+       if (!PyArg_ParseTuple(args, "iy#iiLLL:append_binary",
                              &count, &data, &data_len, &offset,
                              &linenum, &ll1, &ll2, &ll3))
                return NULL;
@@ -473,7 +477,7 @@ static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
        }
 
        /* Write binary data */
-       if (fwrite(data, self->binary_size, rows, self->file) != rows) {
+       if (fwrite(data, self->binary_size, rows, self->file) != (size_t)rows) {
                PyErr_SetFromErrno(PyExc_OSError);
                return NULL;
        }
@@ -487,7 +491,7 @@ static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
 }
 
 /****
- * Extract to string
+ * Extract to binary bytes object containing ASCII text-formatted data
  */
 
 static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
@@ -585,7 +589,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
                str[len++] = '\n';
        }
 
-       PyObject *pystr = PyString_FromStringAndSize(str, len);
+       PyObject *pystr = PyBytes_FromStringAndSize(str, len);
        free(str);
        return pystr;
 err:
@@ -595,7 +599,7 @@ err:
 }
 
 /****
- * Extract to binary string containing raw little-endian binary data
+ * Extract to binary bytes object containing raw little-endian binary data
  */
 static PyObject *Rocket_extract_binary(Rocket *self, PyObject *args)
 {
@@ -624,7 +628,7 @@ static PyObject *Rocket_extract_binary(Rocket *self, PyObject *args)
 
        /* Data in the file is already in the desired little-endian
           binary format, so just read it directly. */
-       if (fread(str, self->binary_size, count, self->file) != count) {
+       if (fread(str, self->binary_size, count, self->file) != (size_t)count) {
                free(str);
                PyErr_SetFromErrno(PyExc_OSError);
                return NULL;
@@ -748,7 +752,7 @@ static PyMethodDef Rocket_methods[] = {
 };
 
 static PyTypeObject RocketType = {
-       PyObject_HEAD_INIT(NULL)
+       PyVarObject_HEAD_INIT(NULL, 0)
 
        .tp_name = "rocket.Rocket",
        .tp_basicsize = sizeof(Rocket),
@@ -773,17 +777,23 @@ static PyMethodDef module_methods[] = {
        { NULL },
 };
 
-PyMODINIT_FUNC
-initrocket(void)
+static struct PyModuleDef moduledef = {
+       PyModuleDef_HEAD_INIT,
+       .m_name = "rocker",
+       .m_doc = "Rocket data parsing and formatting module",
+       .m_size = -1,
+       .m_methods = module_methods,
+};
+
+PyMODINIT_FUNC PyInit_rocket(void)
 {
        PyObject *module;
 
        RocketType.tp_new = PyType_GenericNew;
        if (PyType_Ready(&RocketType) < 0)
-               return;
+               return NULL;
 
-       module = Py_InitModule3("rocket", module_methods,
-                               "Rocket data parsing and formatting module");
+       module = PyModule_Create(&moduledef);
        Py_INCREF(&RocketType);
        PyModule_AddObject(module, "Rocket", (PyObject *)&RocketType);
@@ -792,5 +802,5 @@ initrocket(void)
        PyModule_AddObject(module, "ParseError", ParseError);
        add_parseerror_codes(module);
 
-       return;
+       return module;
 }
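Porting note: the rocket.c changes above switch every buffer-passing format unit from str-oriented ('s', 't#', PyString_*) to bytes-oriented ('y', 'y#', PyBytes_*), so Python callers now pass and receive bytes. A hedged pure-Python sketch of the kind of bytes round-trip involved (the '<qd' row layout here is purely illustrative; the real layouts are defined in the C code, not here):

    import struct

    # Pack one little-endian row: int64 timestamp plus float64 value.
    row = struct.pack('<qd', 1234567890, 42.5)
    assert isinstance(row, bytes)        # Python 3 binary data is bytes
    ts, val = struct.unpack('<qd', row)
    assert (ts, val) == (1234567890, 42.5)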
@@ -1,26 +1,21 @@
 """CherryPy-based server for accessing NILM database via HTTP"""
 
-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the nilmdb module instead.
-from __future__ import absolute_import
+import os
+import json
+import socket
+import traceback
+
+import psutil
+import cherrypy
+
 import nilmdb.server
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import sprintf
 from nilmdb.server.errors import NilmDBError
 from nilmdb.utils.time import string_to_timestamp
 
-import cherrypy
-import sys
-import os
-import socket
-import simplejson as json
-import decorator
-import psutil
-import traceback
-
 from nilmdb.server.serverutil import (
     chunked_response,
     response_type,
-    workaround_cp_bug_1200,
     exception_to_httperror,
     CORS_allow,
     json_to_request_params,
@@ -33,17 +28,15 @@ from nilmdb.server.serverutil import (
 # Add CORS_allow tool
 cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
 
-class NilmApp(object):
+
+class NilmApp():
     def __init__(self, db):
         self.db = db
 
 
 # CherryPy apps
 class Root(NilmApp):
     """Root application for NILM database"""
 
-    def __init__(self, db):
-        super(Root, self).__init__(db)
-
     # /
     @cherrypy.expose
     def index(self):
@@ -72,11 +65,14 @@ class Root(NilmApp):
         path = self.db.get_basepath()
         usage = psutil.disk_usage(path)
         dbsize = nilmdb.utils.du(path)
-        return { "path": path,
-                 "size": dbsize,
-                 "other": max(usage.used - dbsize, 0),
-                 "reserved": max(usage.total - usage.used - usage.free, 0),
-                 "free": usage.free }
+        return {
+            "path": path,
+            "size": dbsize,
+            "other": max(usage.used - dbsize, 0),
+            "reserved": max(usage.total - usage.used - usage.free, 0),
+            "free": usage.free
+        }
 
 
 class Stream(NilmApp):
     """Stream-specific operations"""
@@ -89,7 +85,8 @@ class Stream(NilmApp):
             start = string_to_timestamp(start_param)
         except Exception:
             raise cherrypy.HTTPError("400 Bad Request", sprintf(
-                "invalid start (%s): must be a numeric timestamp", start_param))
+                "invalid start (%s): must be a numeric timestamp",
+                start_param))
         try:
             if end_param is not None:
                 end = string_to_timestamp(end_param)
@@ -166,9 +163,9 @@ class Stream(NilmApp):
         try:
             data = self.db.stream_get_metadata(path)
         except nilmdb.server.nilmdb.StreamError as e:
-            raise cherrypy.HTTPError("404 Not Found", e.message)
+            raise cherrypy.HTTPError("404 Not Found", str(e))
         if key is None:            # If no keys specified, return them all
-            key = data.keys()
+            key = list(data.keys())
         elif not isinstance(key, list):
             key = [key]
         result = {}
@@ -185,11 +182,9 @@ class Stream(NilmApp):
         try:
             data = dict(json.loads(data))
         except TypeError as e:
-            raise NilmDBError("can't parse 'data' parameter: " + e.message)
+            raise NilmDBError("can't parse 'data' parameter: " + str(e))
         for key in data:
-            if not (isinstance(data[key], basestring) or
-                    isinstance(data[key], float) or
-                    isinstance(data[key], int)):
+            if not isinstance(data[key], (str, float, int)):
                 raise NilmDBError("metadata values must be a string or number")
         function(path, data)
@@ -246,6 +241,9 @@ class Stream(NilmApp):
                                      "application/octet-stream for "
                                      "binary data, not " + content_type)
 
+        # Note that non-binary data is *not* decoded from bytes to string,
+        # but rather passed directly to stream_insert.
+
         # Check path and get layout
         if len(self.db.stream_list(path=path)) != 1:
             raise cherrypy.HTTPError("404", "No such stream: " + path)
@@ -282,12 +280,12 @@ class Stream(NilmApp):
         if len(self.db.stream_list(path=path)) != 1:
             raise cherrypy.HTTPError("404", "No such stream: " + path)
 
-        @workaround_cp_bug_1200
         def content(start, end):
             # Note: disable chunked responses to see tracebacks from here.
             while True:
                 (removed, restart) = self.db.stream_remove(path, start, end)
-                yield json.dumps(removed) + "\r\n"
+                response = json.dumps(removed) + "\r\n"
+                yield response.encode('utf-8')
                 if restart is None:
                     break
                 start = restart
@@ -322,14 +320,13 @@ class Stream(NilmApp):
         if diffpath and len(self.db.stream_list(path=diffpath)) != 1:
             raise cherrypy.HTTPError("404", "No such stream: " + diffpath)
 
-        @workaround_cp_bug_1200
         def content(start, end):
             # Note: disable chunked responses to see tracebacks from here.
             while True:
                 (ints, restart) = self.db.stream_intervals(path, start, end,
                                                            diffpath)
                 response = ''.join([json.dumps(i) + "\r\n" for i in ints])
-                yield response
+                yield response.encode('utf-8')
                 if restart is None:
                     break
                 start = restart
@@ -375,13 +372,12 @@ class Stream(NilmApp):
             content_type = "text/plain"
         cherrypy.response.headers['Content-Type'] = content_type
 
-        @workaround_cp_bug_1200
         def content(start, end):
             # Note: disable chunked responses to see tracebacks from here.
             if count:
                 matched = self.db.stream_extract(path, start, end,
                                                  count=True)
-                yield sprintf("%d\n", matched)
+                yield sprintf(b"%d\n", matched)
                 return
 
             while True:
@@ -395,21 +391,24 @@ class Stream(NilmApp):
             start = restart
         return content(start, end)
 
-class Exiter(object):
+
+class Exiter():
     """App that exits the server, for testing"""
     @cherrypy.expose
     def index(self):
         cherrypy.response.headers['Content-Type'] = 'text/plain'
 
         def content():
-            yield 'Exiting by request'
+            yield b'Exiting by request'
             raise SystemExit
 
         return content()
     index._cp_config = {'response.stream': True}
 
-class Server(object):
+
+class Server():
     def __init__(self, db, host='127.0.0.1', port=8080,
                  stoppable=False,        # whether /exit URL exists
-                 embedded = True,        # hide diagnostics and output, etc
                  fast_shutdown=False,    # don't wait for clients to disconn.
                  force_traceback=False,  # include traceback in all errors
                  basepath='',            # base URL path for cherrypy.tree
@@ -417,7 +416,6 @@ class Server(object):
         # Save server version, just for verification during tests
         self.version = nilmdb.__version__
 
-        self.embedded = embedded
         self.db = db
         if not getattr(db, "_thread_safe", None):
             raise KeyError("Database object " + str(db) + " doesn't claim "
@@ -427,13 +425,12 @@ class Server(object):
 
         # Build up global server configuration
         cherrypy.config.update({
+            'environment': 'embedded',
             'server.socket_host': host,
             'server.socket_port': port,
             'engine.autoreload.on': False,
             'server.max_request_body_size': 8*1024*1024,
         })
-        if self.embedded:
-            cherrypy.config.update({ 'environment': 'embedded' })
 
         # Build up application specific configuration
         app_config = {}
@@ -477,8 +474,7 @@ class Server(object):
         # Shutdowns normally wait for clients to disconnect.  To speed
         # up tests, set fast_shutdown = True
         if fast_shutdown:
-            # Setting timeout to 0 triggers os._exit(70) at shutdown, grr...
-            cherrypy.server.shutdown_timeout = 0.01
+            cherrypy.server.shutdown_timeout = 0
         else:
             cherrypy.server.shutdown_timeout = 5
@@ -491,17 +487,20 @@ class Server(object):
                                          self.force_traceback)
 
     def start(self, blocking=False, event=None):
-        cherrypy_start(blocking, event, self.embedded)
+        cherrypy_start(blocking, event)
 
     def stop(self):
         cherrypy_stop()
 
 
 # Use a single global nilmdb.server.NilmDB and nilmdb.server.Server
 # instance since the database can only be opened once.  For this to
 # work, the web server must use only a single process and single
 # Python interpreter.  Multiple threads are OK.
 _wsgi_server = None
-def wsgi_application(dbpath, basepath): # pragma: no cover
+
+
+def wsgi_application(dbpath, basepath):
     """Return a WSGI application object with a database at the
     specified path.
 
@@ -516,17 +515,16 @@ def wsgi_application(dbpath, basepath):
         if _wsgi_server is None:
             # Try to start the server
             try:
-                db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(dbpath)
+                db = nilmdb.utils.serializer_proxy(
+                    nilmdb.server.NilmDB)(dbpath)
                 _wsgi_server = nilmdb.server.Server(
-                    db, embedded = True,
-                    basepath = basepath.rstrip('/'))
+                    db, basepath=basepath.rstrip('/'))
             except Exception:
                 # Build an error message on failure
                 import pprint
                 err = sprintf("Initializing database at path '%s' failed:\n\n",
                               dbpath)
                 err += traceback.format_exc()
-                try:
                 import pwd
                 import grp
                 err += sprintf("\nRunning as: uid=%d (%s), gid=%d (%s) "
@@ -534,15 +532,14 @@ def wsgi_application(dbpath, basepath):
                                os.getuid(), pwd.getpwuid(os.getuid())[0],
                                os.getgid(), grp.getgrgid(os.getgid())[0],
                                socket.gethostname(), os.getpid())
-                except ImportError:
-                    pass
                 err += sprintf("\nEnvironment:\n%s\n", pprint.pformat(environ))
         if _wsgi_server is None:
             # Serve up the error with our own mini WSGI app.
-            headers = [ ('Content-type', 'text/plain'),
-                        ('Content-length', str(len(err))) ]
+            err_b = err.encode('utf-8')
+            headers = [('Content-type', 'text/plain; charset=utf-8'),
+                       ('Content-length', str(len(err_b)))]
             start_response("500 Internal Server Error", headers)
-            return [err]
+            return [err_b]
 
         # Call the normal application
         return _wsgi_server.wsgi_application(environ, start_response)
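Porting note: under PEP 3333, a WSGI response body must be an iterable of bytes, and Content-Length should count encoded bytes rather than characters, which is why the error path above encodes before measuring. A minimal standalone sketch of the same pattern (the app name and message are hypothetical, not this project's API):

    def error_app(environ, start_response):
        err = "database initialization failed"
        err_b = err.encode('utf-8')          # encode first...
        headers = [('Content-type', 'text/plain; charset=utf-8'),
                   ('Content-length', str(len(err_b)))]  # ...then measure
        start_response("500 Internal Server Error", headers)
        return [err_b]                       # iterable of bytes, per PEP 3333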
@@ -1,11 +1,15 @@
 """Miscellaneous decorators and other helpers for running a CherryPy
 server"""
 
-import cherrypy
-import sys
 import os
+import sys
+import json
 import decorator
-import simplejson as json
+import functools
+import threading
+
+import cherrypy
 
 
 # Helper to parse parameters into booleans
 def bool_param(s):
@@ -22,6 +26,7 @@ def bool_param(s):
     raise cherrypy.HTTPError("400 Bad Request",
                              "can't parse parameter: " + ss)
 
+
 # Decorators
 def chunked_response(func):
     """Decorator to enable chunked responses."""
@@ -30,6 +35,7 @@ def chunked_response(func):
     func._cp_config = {'response.stream': True}
     return func
 
+
 def response_type(content_type):
     """Return a decorator-generating function that sets the
     response type to the specified string."""
@@ -38,27 +44,6 @@ def response_type(content_type):
         return func(*args, **kwargs)
     return decorator.decorator(wrapper)
 
-@decorator.decorator
-def workaround_cp_bug_1200(func, *args, **kwargs): # pragma: no cover
-    """Decorator to work around CherryPy bug #1200 in a response
-    generator.
-
-    Even if chunked responses are disabled, LookupError or
-    UnicodeError exceptions may still be swallowed by CherryPy due to
-    bug #1200.  This throws them as generic Exceptions instead so that
-    they make it through.
-    """
-    exc_info = None
-    try:
-        for val in func(*args, **kwargs):
-            yield val
-    except (LookupError, UnicodeError):
-        # Re-raise it, but maintain the original traceback
-        exc_info = sys.exc_info()
-        new_exc = Exception(exc_info[0].__name__ + ": " + str(exc_info[1]))
-        raise new_exc, None, exc_info[2]
-    finally:
-        del exc_info
-
 def exception_to_httperror(*expected):
     """Return a decorator-generating function that catches expected
@@ -76,7 +61,7 @@ def exception_to_httperror(*expected):
             # Re-raise it, but maintain the original traceback
             exc_info = sys.exc_info()
             new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
-            raise new_exc, None, exc_info[2]
+            raise new_exc.with_traceback(exc_info[2])
         finally:
             del exc_info
     # We need to preserve the function's argspecs for CherryPy to
@@ -84,8 +69,8 @@ def exception_to_httperror(*expected):
     # care of that.
     return decorator.decorator(wrapper)
 
-# Custom CherryPy tools
 
+# Custom CherryPy tools
 def CORS_allow(methods):
     """This does several things:
 
@@ -101,12 +86,12 @@ def CORS_allow(methods):
     request = cherrypy.request.headers
     response = cherrypy.response.headers
 
-    if not isinstance(methods, (tuple, list)):   # pragma: no cover
+    if not isinstance(methods, (tuple, list)):
        methods = [methods]
    methods = [m.upper() for m in methods if m]
-    if not methods:   # pragma: no cover
+    if not methods:
        methods = ['GET', 'HEAD']
-    elif 'GET' in methods and 'HEAD' not in methods:   # pragma: no cover
+    elif 'GET' in methods and 'HEAD' not in methods:
        methods.append('HEAD')
    response['Allow'] = ', '.join(methods)
 
@@ -123,7 +108,7 @@ def CORS_allow(methods):
         response['Access-Control-Allow-Methods'] = ', '.join(methods)
         # Try to stop further processing and return a 200 OK
         cherrypy.response.status = "200 OK"
-        cherrypy.response.body = ""
+        cherrypy.response.body = b""
         cherrypy.request.handler = lambda: ""
         return
 
@@ -140,58 +125,83 @@ def json_to_request_params(body):
         raise cherrypy.HTTPError(415)
     cherrypy.request.params.update(cherrypy.request.json)
 
+
 # Used as an "error_page.default" handler
 def json_error_page(status, message, traceback, version,
                     force_traceback=False):
     """Return a custom error page in JSON so the client can parse it"""
     errordata = {"status": status,
                  "message": message,
+                 "version": version,
                  "traceback": traceback}
     # Don't send a traceback if the error was 400-499 (client's fault)
-    try:
     code = int(status.split()[0])
     if not force_traceback:
-        if code >= 400 and code <= 499:
+        if 400 <= code <= 499:
             errordata["traceback"] = ""
-    except Exception: # pragma: no cover
-        pass
     # Override the response type, which was previously set to text/html
     cherrypy.serving.response.headers['Content-Type'] = (
         "application/json;charset=utf-8")
     # Undo the HTML escaping that cherrypy's get_error_page function applies
     # (cherrypy issue 1135)
-    for k, v in errordata.iteritems():
+    for k, v in errordata.items():
         v = v.replace("&lt;", "<")
         v = v.replace("&gt;", ">")
         v = v.replace("&amp;", "&")
         errordata[k] = v
     return json.dumps(errordata, separators=(',', ':'))
 
 
+class CherryPyExit(SystemExit):
+    pass
+
+
+def cherrypy_patch_exit():
+    # Cherrypy stupidly calls os._exit(70) when it can't bind the port
+    # and exits.  Instead of that, raise a CherryPyExit (derived from
+    # SystemExit).  This exception may not make it back up to the caller
+    # due to internal thread use in the CherryPy engine, but there should
+    # be at least some indication that it happened.
+    bus = cherrypy.process.wspbus.bus
+    if "_patched_exit" in bus.__dict__:
+        return
+    bus._patched_exit = True
+
+    def patched_exit(orig):
+        real_exit = os._exit
+
+        def fake_exit(code):
+            raise CherryPyExit(code)
+        os._exit = fake_exit
+        try:
+            orig()
+        finally:
+            os._exit = real_exit
+    bus.exit = functools.partial(patched_exit, bus.exit)
+
+    # A behavior change in Python 3.8 means that some thread exceptions,
+    # derived from SystemExit, now print tracebacks where they didn't
+    # used to: https://bugs.python.org/issue1230540
+    # Install a thread exception hook that ignores CherryPyExit;
+    # to make this match the behavior where we didn't set
+    # threading.excepthook, we also need to ignore SystemExit.
+    def hook(args):
+        if args.exc_type == CherryPyExit or args.exc_type == SystemExit:
+            return
+        sys.excepthook(args.exc_type, args.exc_value,
+                       args.exc_traceback)  # pragma: no cover
+    threading.excepthook = hook
+
+
 # Start/stop CherryPy standalone server
-def cherrypy_start(blocking = False, event = False, embedded = False):
+def cherrypy_start(blocking=False, event=False):
     """Start the CherryPy server, handling errors and signals
     somewhat gracefully."""
 
-    if not embedded: # pragma: no cover
-        # Handle signals nicely
-        if hasattr(cherrypy.engine, "signal_handler"):
-            cherrypy.engine.signal_handler.subscribe()
-        if hasattr(cherrypy.engine, "console_control_handler"):
-            cherrypy.engine.console_control_handler.subscribe()
+    cherrypy_patch_exit()
 
-    # Cherrypy stupidly calls os._exit(70) when it can't bind the
-    # port.  At least try to print a reasonable error and continue
-    # in this case, rather than just dying silently (as we would
-    # otherwise do in embedded mode)
-    real_exit = os._exit
-    def fake_exit(code): # pragma: no cover
-        if code == os.EX_SOFTWARE:
-            fprintf(sys.stderr, "error: CherryPy called os._exit!\n")
-        else:
-            real_exit(code)
-    os._exit = fake_exit
+    # Start the server
     cherrypy.engine.start()
-    os._exit = real_exit
 
     # Signal that the engine has started successfully
     if event is not None:
@@ -201,14 +211,15 @@ def cherrypy_start(blocking=False, event=False):
         try:
             cherrypy.engine.wait(cherrypy.engine.states.EXITING,
                                  interval=0.1, channel='main')
-        except (KeyboardInterrupt, IOError): # pragma: no cover
-            cherrypy.engine.log('Keyboard Interrupt: shutting down bus')
+        except (KeyboardInterrupt, IOError):
+            cherrypy.engine.log('Keyboard Interrupt: shutting down')
             cherrypy.engine.exit()
-        except SystemExit: # pragma: no cover
-            cherrypy.engine.log('SystemExit raised: shutting down bus')
+        except SystemExit:
+            cherrypy.engine.log('SystemExit raised: shutting down')
             cherrypy.engine.exit()
             raise
 
 
 # Stop CherryPy server
 def cherrypy_stop():
     cherrypy.engine.exit()
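Porting note: the three-argument raise statement ("raise exc, None, tb") is a syntax error in Python 3; exc.with_traceback(tb) is the replacement used above. A small self-contained sketch of the idiom (the wrapper function and ValueError are illustrative, not code from this repository):

    import sys

    def reraise_with_context():
        try:
            {}['missing']
        except KeyError:
            exc_info = sys.exc_info()
            new_exc = ValueError("lookup failed: " + str(exc_info[1]))
            # Python 3 spelling of "raise new_exc, None, exc_info[2]":
            # re-raise while keeping the original traceback attached.
            raise new_exc.with_traceback(exc_info[2])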
@@ -1,6 +1,6 @@
 """NilmDB utilities"""
 
-from __future__ import absolute_import
 from nilmdb.utils.timer import Timer
 from nilmdb.utils.serializer import serializer_proxy
 from nilmdb.utils.lrucache import lru_cache
@@ -14,4 +14,3 @@ import nilmdb.utils.iterator
 import nilmdb.utils.interval
 import nilmdb.utils.lock
 import nilmdb.utils.sort
-import nilmdb.utils.unicode
@@ -2,12 +2,12 @@
 
 import os
 
 
 def replace_file(filename, content):
     """Attempt to atomically and durably replace the filename with the
-    given contents.  This is intended to be 'pretty good on most
-    OSes', but not necessarily bulletproof."""
+    given contents"""
 
-    newfilename = filename + ".new"
+    newfilename = filename + b".new"
 
     # Write to new file, flush it
     with open(newfilename, "wb") as f:
@@ -16,11 +16,4 @@ def replace_file(filename, content):
         os.fsync(f.fileno())
 
     # Move new file over old one
-    try:
-        os.rename(newfilename, filename)
-    except OSError: # pragma: no cover
-        # Some OSes might not support renaming over an existing file.
-        # This is definitely NOT atomic!
-        os.remove(filename)
-        os.rename(newfilename, filename)
+    os.replace(newfilename, filename)
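Porting note: os.replace(), added in Python 3.3, renames atomically on POSIX and, unlike os.rename() on Windows, also overwrites an existing destination; that is what lets the remove-then-rename fallback above be deleted. A minimal sketch of the resulting pattern, assuming a bytes filename as in the diff (the function name is illustrative):

    import os

    def replace_file_sketch(filename, content):
        newfilename = filename + b".new"
        # Write the new contents and flush them all the way to disk first
        with open(newfilename, "wb") as f:
            f.write(content)
            f.flush()
            os.fsync(f.fileno())
        # Atomically move the new file over the old one
        os.replace(newfilename, filename)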
@@ -1,710 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright 2009 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# Disable the invalid name warning as we are inheriting from a standard library
-# object.
-# pylint: disable-msg=C6409,W0212
-
-"""A version of the datetime module which *cares* about timezones.
-
-This module will never return a naive datetime object. This requires the module
-know your local timezone, which it tries really hard to figure out.
-
-You can override the detection by using the datetime.tzaware.defaulttz_set
-method. It the module is unable to figure out the timezone itself this method
-*must* be called before the normal module is imported. If done before importing
-it can also speed up the time taken to import as the defaulttz will no longer
-try and do the detection.
-"""
-
-__author__ = "tansell@google.com (Tim Ansell)"
-
-import calendar
-import datetime
-import os
-import os.path
-import re
-import time
-import warnings
-import dateutil.parser
-import dateutil.relativedelta
-import dateutil.tz
-import pytz
-import pytz_abbr
-
-
-try:
-  # pylint: disable-msg=C6204
-  import functools
-except ImportError, e:
-
-  class functools(object):
-    """Fake replacement for a full functools."""
-
-    # pylint: disable-msg=W0613
-    @staticmethod
-    def wraps(f, *args, **kw):
-      return f
-
-
-# Need to patch pytz.utc to have a _utcoffset so you can normalize/localize
-# using it.
-pytz.utc._utcoffset = datetime.timedelta()
-
-
-timedelta = datetime.timedelta
-
-
-def _tzinfome(tzinfo):
-  """Gets a tzinfo object from a string.
-
-  Args:
-    tzinfo: A string (or string like) object, or a datetime.tzinfo object.
-
-  Returns:
-    An datetime.tzinfo object.
-
-  Raises:
-    UnknownTimeZoneError: If the timezone given can't be decoded.
-  """
-  if not isinstance(tzinfo, datetime.tzinfo):
-    try:
-      tzinfo = pytz.timezone(tzinfo)
-    except AttributeError:
-      raise pytz.UnknownTimeZoneError("Unknown timezone! %s" % tzinfo)
-  return tzinfo
-
-
-# Our "local" timezone
-_localtz = None
-
-
-def localtz():
-  """Get the local timezone.
-
-  Returns:
-    The localtime timezone as a tzinfo object.
-  """
-  # pylint: disable-msg=W0603
-  global _localtz
-  if _localtz is None:
-    _localtz = detect_timezone()
-  return _localtz
-
-
-def localtz_set(timezone):
-  """Set the local timezone."""
-  # pylint: disable-msg=W0603
-  global _localtz
-  _localtz = _tzinfome(timezone)
-
-
-def detect_timezone():
-  """Try and detect the timezone that Python is currently running in.
-
-  We have a bunch of different methods for trying to figure this out (listed in
-  order they are attempted).
-    * Try TZ environment variable.
-    * Try and find /etc/timezone file (with timezone name).
-    * Try and find /etc/localtime file (with timezone data).
-    * Try and match a TZ to the current dst/offset/shortname.
-
-  Returns:
-    The detected local timezone as a tzinfo object
-
-  Raises:
-    pytz.UnknownTimeZoneError: If it was unable to detect a timezone.
-  """
-  # First we try the TZ variable
-  tz = _detect_timezone_environ()
-  if tz is not None:
-    return tz
-
-  # Second we try /etc/timezone and use the value in that
-  tz = _detect_timezone_etc_timezone()
-  if tz is not None:
-    return tz
-
-  # Next we try and see if something matches the tzinfo in /etc/localtime
-  tz = _detect_timezone_etc_localtime()
-  if tz is not None:
-    return tz
-
-  # Next we try and use a similiar method to what PHP does.
-  # We first try to search on time.tzname, time.timezone, time.daylight to
-  # match a pytz zone.
-  warnings.warn("Had to fall back to worst detection method (the 'PHP' "
-                "method).")
-
-  tz = _detect_timezone_php()
-  if tz is not None:
-    return tz
-
-  raise pytz.UnknownTimeZoneError("Unable to detect your timezone!")
-
-
-def _detect_timezone_environ():
-  if "TZ" in os.environ:
-    try:
-      return pytz.timezone(os.environ["TZ"])
-    except (IOError, pytz.UnknownTimeZoneError):
-      warnings.warn("You provided a TZ environment value (%r) we did not "
-                    "understand!" % os.environ["TZ"])
-
-
-def _detect_timezone_etc_timezone():
-  if os.path.exists("/etc/timezone"):
-    try:
-      tz = file("/etc/timezone").read().strip()
-      try:
-        return pytz.timezone(tz)
-      except (IOError, pytz.UnknownTimeZoneError), ei:
-        warnings.warn("Your /etc/timezone file references a timezone (%r) that"
-                      " is not valid (%r)." % (tz, ei))
-
-    # Problem reading the /etc/timezone file
-    except IOError, eo:
-      warnings.warn("Could not access your /etc/timezone file: %s" % eo)
-
-
-def _detect_timezone_etc_localtime():
-  matches = []
-  if os.path.exists("/etc/localtime"):
-    localtime = pytz.tzfile.build_tzinfo("/etc/localtime",
-                                         file("/etc/localtime"))
-
-    # See if we can find a "Human Name" for this..
-    for tzname in pytz.all_timezones:
-      tz = _tzinfome(tzname)
-
-      if dir(tz) != dir(localtime):
-        continue
-
-      for attrib in dir(tz):
-        # Ignore functions and specials
-        if callable(getattr(tz, attrib)) or attrib.startswith("__"):
-          continue
-
-        # This will always be different
-        if attrib == "zone" or attrib == "_tzinfos":
-          continue
-
-        if getattr(tz, attrib) != getattr(localtime, attrib):
-          break
-
-      # We get here iff break didn't happen, i.e. no meaningful attributes
-      # differ between tz and localtime
-      else:
-        matches.append(tzname)
-
-    if len(matches) == 1:
-      return _tzinfome(matches[0])
-    else:
-      # Warn the person about this!
-      warning = "Could not get a human name for your timezone: "
-      if len(matches) > 1:
-        warning += ("We detected multiple matches for your /etc/localtime. "
-                    "(Matches where %s)" % matches)
-        return _tzinfome(matches[0])
-      else:
-        warning += "We detected no matches for your /etc/localtime."
-      warnings.warn(warning)
-
-      # Register /etc/localtime as the timezone loaded.
-      pytz._tzinfo_cache['/etc/localtime'] = localtime
-      return localtime
-
-
-def _detect_timezone_php():
-  tomatch = (time.tzname[0], time.timezone, time.daylight)
-  now = datetime.datetime.now()
-
-  matches = []
-  for tzname in pytz.all_timezones:
-    try:
-      tz = pytz.timezone(tzname)
-    except IOError:
-      continue
-
-    try:
-      indst = tz.localize(now).timetuple()[-1]
-
-      if tomatch == (tz._tzname, -tz._utcoffset.seconds, indst):
-        matches.append(tzname)
-
-    # pylint: disable-msg=W0704
-    except AttributeError:
-      pass
-
-  if len(matches) > 1:
-    warnings.warn("We detected multiple matches for the timezone, choosing "
-                  "the first %s. (Matches where %s)" % (matches[0], matches))
-    return pytz.timezone(matches[0])
-
-
-class datetime_tz(datetime.datetime):
-  """An extension of the inbuilt datetime adding more functionality.
-
-  The extra functionality includes:
-    * Partial parsing support (IE 2006/02/30 matches %Y/%M/%D %H:%M)
-    * Full integration with pytz (just give it the string of the timezone!)
-    * Proper support for going to/from Unix timestamps (which are in UTC!).
-  """
-  __slots__ = ["is_dst"]
-
-  def __new__(cls, *args, **kw):
-    args = list(args)
-    if not args:
-      raise TypeError("Not enough arguments given.")
-
-    # See if we are given a tzinfo object...
-    tzinfo = None
-    if isinstance(args[-1], (datetime.tzinfo, basestring)):
-      tzinfo = _tzinfome(args.pop(-1))
-    elif kw.get("tzinfo", None) is not None:
-      tzinfo = _tzinfome(kw.pop("tzinfo"))
-
-    # Create a datetime object if we don't have one
-    if isinstance(args[0], datetime.datetime):
-      # Convert the datetime instance to a datetime object.
-      newargs = (list(args[0].timetuple()[0:6]) +
-                 [args[0].microsecond, args[0].tzinfo])
-      dt = datetime.datetime(*newargs)
-
-      if tzinfo is None and dt.tzinfo is None:
-        raise TypeError("Must specify a timezone!")
-
-      if tzinfo is not None and dt.tzinfo is not None:
-        raise TypeError("Can not give a timezone with timezone aware"
-                        " datetime object! (Use localize.)")
-    else:
-      dt = datetime.datetime(*args, **kw)
-
-    if dt.tzinfo is not None:
-      # Re-normalize the dt object
-      dt = dt.tzinfo.normalize(dt)
-
-    else:
-      if tzinfo is None:
-        tzinfo = localtz()
-
-      try:
-        dt = tzinfo.localize(dt, is_dst=None)
-      except pytz.AmbiguousTimeError:
-        is_dst = None
-        if "is_dst" in kw:
-          is_dst = kw.pop("is_dst")
-
-        try:
-          dt = tzinfo.localize(dt, is_dst)
-        except IndexError:
-          raise pytz.AmbiguousTimeError("No such time exists!")
-
-    newargs = list(dt.timetuple()[0:6])+[dt.microsecond, dt.tzinfo]
-    obj = datetime.datetime.__new__(cls, *newargs)
-    obj.is_dst = obj.dst() != datetime.timedelta(0)
-    return obj
-
-  def asdatetime(self, naive=True):
-    """Return this datetime_tz as a datetime object.
-
-    Args:
-      naive: Return *without* any tz info.
-
-    Returns:
-      This datetime_tz as a datetime object.
-    """
-    args = list(self.timetuple()[0:6])+[self.microsecond]
-    if not naive:
-      args.append(self.tzinfo)
-    return datetime.datetime(*args)
-
-  def asdate(self):
-    """Return this datetime_tz as a date object.
-
-    Returns:
-      This datetime_tz as a date object.
-    """
-    return datetime.date(self.year, self.month, self.day)
-
-  def totimestamp(self):
-    """Convert this datetime object back to a unix timestamp.
-
-    The Unix epoch is the time 00:00:00 UTC on January 1, 1970.
-
-    Returns:
-      Unix timestamp.
-    """
-    return calendar.timegm(self.utctimetuple())+1e-6*self.microsecond
-
-  def astimezone(self, tzinfo):
-    """Returns a version of this timestamp converted to the given timezone.
-
-    Args:
-      tzinfo: Either a datetime.tzinfo object or a string (which will be looked
-              up in pytz.
-
-    Returns:
-      A datetime_tz object in the given timezone.
-    """
-    # Assert we are not a naive datetime object
-    assert self.tzinfo is not None
-
-    tzinfo = _tzinfome(tzinfo)
-
-    d = self.asdatetime(naive=False).astimezone(tzinfo)
-    return datetime_tz(d)
-
-  # pylint: disable-msg=C6113
-  def replace(self, **kw):
-    """Return datetime with new specified fields given as arguments.
-
-    For example, dt.replace(days=4) would return a new datetime_tz object with
-    exactly the same as dt but with the days attribute equal to 4.
-
-    Any attribute can be replaced, but tzinfo can not be set to None.
-
-    Args:
-      Any datetime_tz attribute.
-
-    Returns:
-      A datetime_tz object with the attributes replaced.
-
-    Raises:
-      TypeError: If the given replacement is invalid.
-    """
-    if "tzinfo" in kw:
-      if kw["tzinfo"] is None:
-        raise TypeError("Can not remove the timezone use asdatetime()")
-
-    is_dst = None
-    if "is_dst" in kw:
-      is_dst = kw["is_dst"]
-      del kw["is_dst"]
-    else:
-      # Use our own DST setting..
-      is_dst = self.is_dst
-
-    replaced = self.asdatetime().replace(**kw)
-
-    return datetime_tz(replaced, tzinfo=self.tzinfo.zone, is_dst=is_dst)
-
-  # pylint: disable-msg=C6310
-  @classmethod
-  def smartparse(cls, toparse, tzinfo=None):
-    """Method which uses dateutil.parse and extras to try and parse the string.
-
-    Valid dates are found at:
-     http://labix.org/python-dateutil#head-1443e0f14ad5dff07efd465e080d1110920673d8-2
-
-    Other valid formats include:
-      "now" or "today"
-      "yesterday"
-      "tommorrow"
-      "5 minutes ago"
-      "10 hours ago"
-      "10h5m ago"
-      "start of yesterday"
-      "end of tommorrow"
-      "end of 3rd of March"
-
-    Args:
-      toparse: The string to parse.
-      tzinfo: Timezone for the resultant datetime_tz object should be in.
-              (Defaults to your local timezone.)
-
-    Returns:
-      New datetime_tz object.
-
-    Raises:
-      ValueError: If unable to make sense of the input.
-    """
-    # Default for empty fields are:
-    #  year/month/day == now
-    #  hour/minute/second/microsecond == 0
-    toparse = toparse.strip()
-
-    if tzinfo is None:
-      dt = cls.now()
-    else:
-      dt = cls.now(tzinfo)
-
-    default = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-
-    # Remove "start of " and "end of " prefix in the string
-    if toparse.lower().startswith("end of "):
-      toparse = toparse[7:].strip()
-
-      dt += datetime.timedelta(days=1)
-      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-      dt -= datetime.timedelta(microseconds=1)
-
-      default = dt
-
-    elif toparse.lower().startswith("start of "):
-      toparse = toparse[9:].strip()
-
-      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-      default = dt
-
-    # Handle strings with "now", "today", "yesterday", "tomorrow" and "ago".
-    # Need to use lowercase
-    toparselower = toparse.lower()
-
-    if toparselower in ["now", "today"]:
-      pass
-
-    elif toparselower == "yesterday":
-      dt -= datetime.timedelta(days=1)
-
-    elif toparselower == "tommorrow":
-      dt += datetime.timedelta(days=1)
-
-    elif "ago" in toparselower:
-      # Remove the "ago" bit
-      toparselower = toparselower[:-3]
-      # Replace all "a day and an hour" with "1 day 1 hour"
-      toparselower = toparselower.replace("a ", "1 ")
-      toparselower = toparselower.replace("an ", "1 ")
-      toparselower = toparselower.replace(" and ", " ")
-
-      # Match the following
-      # 1 hour ago
-      # 1h ago
-      # 1 h ago
-      # 1 hour ago
-      # 2 hours ago
-      # Same with minutes, seconds, etc.
-
-      tocheck = ("seconds", "minutes", "hours", "days", "weeks", "months",
-                 "years")
-      result = {}
-      for match in re.finditer("([0-9]+)([^0-9]*)", toparselower):
-        amount = int(match.group(1))
-        unit = match.group(2).strip()
-
-        for bit in tocheck:
-          regex = "^([%s]|((%s)s?))$" % (
-              bit[0], bit[:-1])
-
-          bitmatch = re.search(regex, unit)
-          if bitmatch:
-            result[bit] = amount
-            break
-        else:
-          raise ValueError("Was not able to parse date unit %r!" % unit)
-
-      delta = dateutil.relativedelta.relativedelta(**result)
-      dt -= delta
-
-    else:
-      # Handle strings with normal datetime format, use original case.
-      dt = dateutil.parser.parse(toparse, default=default.asdatetime(),
-                                 tzinfos=pytz_abbr.tzinfos)
-      if dt is None:
-        raise ValueError("Was not able to parse date!")
-
-      if dt.tzinfo is pytz_abbr.unknown:
-        dt = dt.replace(tzinfo=None)
-
-      if dt.tzinfo is None:
-        if tzinfo is None:
-          tzinfo = localtz()
-        dt = cls(dt, tzinfo)
-      else:
-        if isinstance(dt.tzinfo, pytz_abbr.tzabbr):
-          abbr = dt.tzinfo
-          dt = dt.replace(tzinfo=None)
-          dt = cls(dt, abbr.zone, is_dst=abbr.dst)
-
-        dt = cls(dt)
-
-    return dt
-
-  @classmethod
-  def utcfromtimestamp(cls, timestamp):
-    """Returns a datetime object of a given timestamp (in UTC)."""
-    obj = datetime.datetime.utcfromtimestamp(timestamp)
-    obj = pytz.utc.localize(obj)
-    return cls(obj)
-
-  @classmethod
-  def fromtimestamp(cls, timestamp):
-    """Returns a datetime object of a given timestamp (in local tz)."""
-    d = cls.utcfromtimestamp(timestamp)
-    return d.astimezone(localtz())
-
-  @classmethod
-  def utcnow(cls):
-    """Return a new datetime representing UTC day and time."""
-    obj = datetime.datetime.utcnow()
-    obj = cls(obj, tzinfo=pytz.utc)
-    return obj
-
-  @classmethod
-  def now(cls, tzinfo=None):
-    """[tz] -> new datetime with tz's local day and time."""
-    obj = cls.utcnow()
-    if tzinfo is None:
-      tzinfo = localtz()
-    return obj.astimezone(tzinfo)
-
-  today = now
-
-  @staticmethod
-  def fromordinal(ordinal):
-    raise SyntaxError("Not enough information to create a datetime_tz object "
-                      "from an ordinal. Please use datetime.date.fromordinal")
-
-
-class iterate(object):
-  """Helpful iterators for working with datetime_tz objects."""
-
-  @staticmethod
-  def between(start, delta, end=None):
-    """Return an iterator between this date till given end point.
-
-    Example usage:
-      >>> d = datetime_tz.smartparse("5 days ago")
-      2008/05/12 11:45
-      >>> for i in d.between(timedelta(days=1), datetime_tz.now()):
-      >>>    print i
-      2008/05/12 11:45
-      2008/05/13 11:45
-      2008/05/14 11:45
-      2008/05/15 11:45
-      2008/05/16 11:45
-
-    Args:
-      start: The date to start at.
-      delta: The interval to iterate with.
-      end: (Optional) Date to end at. If not given the iterator will never
-           terminate.
-
-    Yields:
-      datetime_tz objects.
-    """
-    toyield = start
-    while end is None or toyield < end:
-      yield toyield
-      toyield += delta
-
-  @staticmethod
-  def weeks(start, end=None):
-    """Iterate over the weeks between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a week apart.
-    """
-    return iterate.between(start, datetime.timedelta(days=7), end)
-
-  @staticmethod
-  def days(start, end=None):
-    """Iterate over the days between the given datetime_tzs.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
start: datetime_tz to start from.
|
|
||||||
end: (Optional) Date to end at, if not given the iterator will never
|
|
||||||
terminate.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
An iterator which generates datetime_tz objects a day apart.
|
|
||||||
"""
|
|
||||||
return iterate.between(start, datetime.timedelta(days=1), end)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def hours(start, end=None):
|
|
||||||
"""Iterate over the hours between the given datetime_tzs.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
start: datetime_tz to start from.
|
|
||||||
end: (Optional) Date to end at, if not given the iterator will never
|
|
||||||
terminate.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
An iterator which generates datetime_tz objects a hour apart.
|
|
||||||
"""
|
|
||||||
return iterate.between(start, datetime.timedelta(hours=1), end)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def minutes(start, end=None):
|
|
||||||
"""Iterate over the minutes between the given datetime_tzs.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
start: datetime_tz to start from.
|
|
||||||
end: (Optional) Date to end at, if not given the iterator will never
|
|
||||||
terminate.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
An iterator which generates datetime_tz objects a minute apart.
|
|
||||||
"""
|
|
||||||
return iterate.between(start, datetime.timedelta(minutes=1), end)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def seconds(start, end=None):
|
|
||||||
"""Iterate over the seconds between the given datetime_tzs.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
start: datetime_tz to start from.
|
|
||||||
end: (Optional) Date to end at, if not given the iterator will never
|
|
||||||
terminate.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
An iterator which generates datetime_tz objects a second apart.
|
|
||||||
"""
|
|
||||||
return iterate.between(start, datetime.timedelta(minutes=1), end)
|
|
||||||
|
|
||||||
|
|
||||||
def _wrap_method(name):
|
|
||||||
"""Wrap a method.
|
|
||||||
|
|
||||||
Patch a method which might return a datetime.datetime to return a
|
|
||||||
datetime_tz.datetime_tz instead.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name: The name of the method to patch
|
|
||||||
"""
|
|
||||||
method = getattr(datetime.datetime, name)
|
|
||||||
|
|
||||||
# Have to give the second argument as method has no __module__ option.
|
|
||||||
@functools.wraps(method, ("__name__", "__doc__"), ())
|
|
||||||
def wrapper(*args, **kw):
|
|
||||||
r = method(*args, **kw)
|
|
||||||
|
|
||||||
if isinstance(r, datetime.datetime) and not isinstance(r, datetime_tz):
|
|
||||||
r = datetime_tz(r)
|
|
||||||
return r
|
|
||||||
|
|
||||||
setattr(datetime_tz, name, wrapper)
|
|
||||||
|
|
||||||
for methodname in ["__add__", "__radd__", "__rsub__", "__sub__", "combine"]:
|
|
||||||
|
|
||||||
# Make sure we have not already got an override for this method
|
|
||||||
assert methodname not in datetime_tz.__dict__
|
|
||||||
|
|
||||||
_wrap_method(methodname)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['datetime_tz', 'detect_timezone', 'iterate', 'localtz',
|
|
||||||
'localtz_set', 'timedelta', '_detect_timezone_environ',
|
|
||||||
'_detect_timezone_etc_localtime', '_detect_timezone_etc_timezone',
|
|
||||||
'_detect_timezone_php']
|
|
|
@ -1,230 +0,0 @@
|
||||||
#!/usr/bin/python2.4
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# Copyright 2010 Google Inc. All Rights Reserved.
|
|
||||||
#
|
|
||||||
|
|
||||||
"""
|
|
||||||
Common time zone acronyms/abbreviations for use with the datetime_tz module.
|
|
||||||
|
|
||||||
*WARNING*: There are lots of caveats when using this module which are listed
|
|
||||||
below.
|
|
||||||
|
|
||||||
CAVEAT 1: The acronyms/abbreviations are not globally unique, they are not even
|
|
||||||
unique within a region. For example, EST can mean any of,
|
|
||||||
Eastern Standard Time in Australia (which is 10 hour ahead of UTC)
|
|
||||||
Eastern Standard Time in North America (which is 5 hours behind UTC)
|
|
||||||
|
|
||||||
Where there are two abbreviations the more popular one will appear in the all
|
|
||||||
dictionary, while the less common one will only appear in that countries region
|
|
||||||
dictionary. IE If using all, EST will be mapped to Eastern Standard Time in
|
|
||||||
North America.
|
|
||||||
|
|
||||||
CAVEAT 2: Many of the acronyms don't map to a neat Oslon timezones. For example,
|
|
||||||
Eastern European Summer Time (EEDT) is used by many different countries in
|
|
||||||
Europe *at different times*! If the acronym does not map neatly to one zone it
|
|
||||||
is mapped to the Etc/GMT+-XX Oslon zone. This means that any date manipulations
|
|
||||||
can end up with idiot things like summer time in the middle of winter.
|
|
||||||
|
|
||||||
CAVEAT 3: The Summer/Standard time difference is really important! For an hour
|
|
||||||
each year it is needed to determine which time you are actually talking about.
|
|
||||||
2002-10-27 01:20:00 EST != 2002-10-27 01:20:00 EDT
|
|
||||||
"""
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import pytz
|
|
||||||
import pytz.tzfile
|
|
||||||
|
|
||||||
|
|
||||||
class tzabbr(datetime.tzinfo):
|
|
||||||
"""A timezone abbreviation.
|
|
||||||
|
|
||||||
*WARNING*: This is not a tzinfo implementation! Trying to use this as tzinfo
|
|
||||||
object will result in failure. We inherit from datetime.tzinfo so we can get
|
|
||||||
through the dateutil checks.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# A "marker" tzinfo object which is used to signify an unknown timezone.
|
|
||||||
unknown = datetime.tzinfo(0)
|
|
||||||
|
|
||||||
|
|
||||||
regions = {'all': {}, 'military': {}}
|
|
||||||
# Create a special alias for the all and military regions
|
|
||||||
all = regions['all']
|
|
||||||
military = regions['military']
|
|
||||||
|
|
||||||
|
|
||||||
def tzabbr_register(abbr, name, region, zone, dst):
|
|
||||||
"""Register a new timezone abbreviation in the global registry.
|
|
||||||
|
|
||||||
If another abbreviation with the same name has already been registered it new
|
|
||||||
abbreviation will only be registered in region specific dictionary.
|
|
||||||
"""
|
|
||||||
newabbr = tzabbr()
|
|
||||||
newabbr.abbr = abbr
|
|
||||||
newabbr.name = name
|
|
||||||
newabbr.region = region
|
|
||||||
newabbr.zone = zone
|
|
||||||
newabbr.dst = dst
|
|
||||||
|
|
||||||
if abbr not in all:
|
|
||||||
all[abbr] = newabbr
|
|
||||||
|
|
||||||
if not region in regions:
|
|
||||||
regions[region] = {}
|
|
||||||
|
|
||||||
assert abbr not in regions[region]
|
|
||||||
regions[region][abbr] = newabbr
|
|
||||||
|
|
||||||
|
|
||||||
def tzinfos_create(use_region):
|
|
||||||
abbrs = regions[use_region]
|
|
||||||
|
|
||||||
def tzinfos(abbr, offset):
|
|
||||||
if abbr:
|
|
||||||
if abbr in abbrs:
|
|
||||||
result = abbrs[abbr]
|
|
||||||
if offset:
|
|
||||||
# FIXME: Check the offset matches the abbreviation we just selected.
|
|
||||||
pass
|
|
||||||
return result
|
|
||||||
else:
|
|
||||||
raise ValueError, "Unknown timezone found %s" % abbr
|
|
||||||
if offset == 0:
|
|
||||||
return pytz.utc
|
|
||||||
if offset:
|
|
||||||
return pytz.FixedOffset(offset/60)
|
|
||||||
return unknown
|
|
||||||
|
|
||||||
return tzinfos
|
|
||||||
|
|
||||||
|
|
||||||
# Create a special alias for the all tzinfos
|
|
||||||
tzinfos = tzinfos_create('all')
|
|
||||||
|
|
||||||
|
|
||||||
# Create the abbreviations.
|
|
||||||
# *WARNING*: Order matters!
|
|
||||||
tzabbr_register("A", u"Alpha Time Zone", u"Military", "Etc/GMT-1", False)
|
|
||||||
tzabbr_register("ACDT", u"Australian Central Daylight Time", u"Australia",
|
|
||||||
"Australia/Adelaide", True)
|
|
||||||
tzabbr_register("ACST", u"Australian Central Standard Time", u"Australia",
|
|
||||||
"Australia/Adelaide", False)
|
|
||||||
tzabbr_register("ADT", u"Atlantic Daylight Time", u"North America",
|
|
||||||
"America/Halifax", True)
|
|
||||||
tzabbr_register("AEDT", u"Australian Eastern Daylight Time", u"Australia",
|
|
||||||
"Australia/Sydney", True)
|
|
||||||
tzabbr_register("AEST", u"Australian Eastern Standard Time", u"Australia",
|
|
||||||
"Australia/Sydney", False)
|
|
||||||
tzabbr_register("AKDT", u"Alaska Daylight Time", u"North America",
|
|
||||||
"US/Alaska", True)
|
|
||||||
tzabbr_register("AKST", u"Alaska Standard Time", u"North America",
|
|
||||||
"US/Alaska", False)
|
|
||||||
tzabbr_register("AST", u"Atlantic Standard Time", u"North America",
|
|
||||||
"America/Halifax", False)
|
|
||||||
tzabbr_register("AWDT", u"Australian Western Daylight Time", u"Australia",
|
|
||||||
"Australia/West", True)
|
|
||||||
tzabbr_register("AWST", u"Australian Western Standard Time", u"Australia",
|
|
||||||
"Australia/West", False)
|
|
||||||
tzabbr_register("B", u"Bravo Time Zone", u"Military", "Etc/GMT-2", False)
|
|
||||||
tzabbr_register("BST", u"British Summer Time", u"Europe", "Europe/London", True)
|
|
||||||
tzabbr_register("C", u"Charlie Time Zone", u"Military", "Etc/GMT-2", False)
|
|
||||||
tzabbr_register("CDT", u"Central Daylight Time", u"North America",
|
|
||||||
"US/Central", True)
|
|
||||||
tzabbr_register("CEDT", u"Central European Daylight Time", u"Europe",
|
|
||||||
"Etc/GMT+2", True)
|
|
||||||
tzabbr_register("CEST", u"Central European Summer Time", u"Europe",
|
|
||||||
"Etc/GMT+2", True)
|
|
||||||
tzabbr_register("CET", u"Central European Time", u"Europe", "Etc/GMT+1", False)
|
|
||||||
tzabbr_register("CST", u"Central Standard Time", u"North America",
|
|
||||||
"US/Central", False)
|
|
||||||
tzabbr_register("CXT", u"Christmas Island Time", u"Australia",
|
|
||||||
"Indian/Christmas", False)
|
|
||||||
tzabbr_register("D", u"Delta Time Zone", u"Military", "Etc/GMT-2", False)
|
|
||||||
tzabbr_register("E", u"Echo Time Zone", u"Military", "Etc/GMT-2", False)
|
|
||||||
tzabbr_register("EDT", u"Eastern Daylight Time", u"North America",
|
|
||||||
"US/Eastern", True)
|
|
||||||
tzabbr_register("EEDT", u"Eastern European Daylight Time", u"Europe",
|
|
||||||
"Etc/GMT+3", True)
|
|
||||||
tzabbr_register("EEST", u"Eastern European Summer Time", u"Europe",
|
|
||||||
"Etc/GMT+3", True)
|
|
||||||
tzabbr_register("EET", u"Eastern European Time", u"Europe", "Etc/GMT+2", False)
|
|
||||||
tzabbr_register("EST", u"Eastern Standard Time", u"North America",
|
|
||||||
"US/Eastern", False)
|
|
||||||
tzabbr_register("F", u"Foxtrot Time Zone", u"Military", "Etc/GMT-6", False)
|
|
||||||
tzabbr_register("G", u"Golf Time Zone", u"Military", "Etc/GMT-7", False)
|
|
||||||
tzabbr_register("GMT", u"Greenwich Mean Time", u"Europe", pytz.utc, False)
|
|
||||||
tzabbr_register("H", u"Hotel Time Zone", u"Military", "Etc/GMT-8", False)
|
|
||||||
#tzabbr_register("HAA", u"Heure Avancée de l'Atlantique", u"North America", u"UTC - 3 hours")
|
|
||||||
#tzabbr_register("HAC", u"Heure Avancée du Centre", u"North America", u"UTC - 5 hours")
|
|
||||||
tzabbr_register("HADT", u"Hawaii-Aleutian Daylight Time", u"North America",
|
|
||||||
"Pacific/Honolulu", True)
|
|
||||||
#tzabbr_register("HAE", u"Heure Avancée de l'Est", u"North America", u"UTC - 4 hours")
|
|
||||||
#tzabbr_register("HAP", u"Heure Avancée du Pacifique", u"North America", u"UTC - 7 hours")
|
|
||||||
#tzabbr_register("HAR", u"Heure Avancée des Rocheuses", u"North America", u"UTC - 6 hours")
|
|
||||||
tzabbr_register("HAST", u"Hawaii-Aleutian Standard Time", u"North America",
|
|
||||||
"Pacific/Honolulu", False)
|
|
||||||
#tzabbr_register("HAT", u"Heure Avancée de Terre-Neuve", u"North America", u"UTC - 2:30 hours")
|
|
||||||
#tzabbr_register("HAY", u"Heure Avancée du Yukon", u"North America", u"UTC - 8 hours")
|
|
||||||
tzabbr_register("HDT", u"Hawaii Daylight Time", u"North America",
|
|
||||||
"Pacific/Honolulu", True)
|
|
||||||
#tzabbr_register("HNA", u"Heure Normale de l'Atlantique", u"North America", u"UTC - 4 hours")
|
|
||||||
#tzabbr_register("HNC", u"Heure Normale du Centre", u"North America", u"UTC - 6 hours")
|
|
||||||
#tzabbr_register("HNE", u"Heure Normale de l'Est", u"North America", u"UTC - 5 hours")
|
|
||||||
#tzabbr_register("HNP", u"Heure Normale du Pacifique", u"North America", u"UTC - 8 hours")
|
|
||||||
#tzabbr_register("HNR", u"Heure Normale des Rocheuses", u"North America", u"UTC - 7 hours")
|
|
||||||
#tzabbr_register("HNT", u"Heure Normale de Terre-Neuve", u"North America", u"UTC - 3:30 hours")
|
|
||||||
#tzabbr_register("HNY", u"Heure Normale du Yukon", u"North America", u"UTC - 9 hours")
|
|
||||||
tzabbr_register("HST", u"Hawaii Standard Time", u"North America",
|
|
||||||
"Pacific/Honolulu", False)
|
|
||||||
tzabbr_register("I", u"India Time Zone", u"Military", "Etc/GMT-9", False)
|
|
||||||
tzabbr_register("IST", u"Irish Summer Time", u"Europe", "Europe/Dublin", True)
|
|
||||||
tzabbr_register("K", u"Kilo Time Zone", u"Military", "Etc/GMT-10", False)
|
|
||||||
tzabbr_register("L", u"Lima Time Zone", u"Military", "Etc/GMT-11", False)
|
|
||||||
tzabbr_register("M", u"Mike Time Zone", u"Military", "Etc/GMT-12", False)
|
|
||||||
tzabbr_register("MDT", u"Mountain Daylight Time", u"North America",
|
|
||||||
"US/Mountain", True)
|
|
||||||
#tzabbr_register("MESZ", u"Mitteleuroäische Sommerzeit", u"Europe", u"UTC + 2 hours")
|
|
||||||
#tzabbr_register("MEZ", u"Mitteleuropäische Zeit", u"Europe", u"UTC + 1 hour")
|
|
||||||
tzabbr_register("MSD", u"Moscow Daylight Time", u"Europe",
|
|
||||||
"Europe/Moscow", True)
|
|
||||||
tzabbr_register("MSK", u"Moscow Standard Time", u"Europe",
|
|
||||||
"Europe/Moscow", False)
|
|
||||||
tzabbr_register("MST", u"Mountain Standard Time", u"North America",
|
|
||||||
"US/Mountain", False)
|
|
||||||
tzabbr_register("N", u"November Time Zone", u"Military", "Etc/GMT+1", False)
|
|
||||||
tzabbr_register("NDT", u"Newfoundland Daylight Time", u"North America",
|
|
||||||
"America/St_Johns", True)
|
|
||||||
tzabbr_register("NFT", u"Norfolk (Island) Time", u"Australia",
|
|
||||||
"Pacific/Norfolk", False)
|
|
||||||
tzabbr_register("NST", u"Newfoundland Standard Time", u"North America",
|
|
||||||
"America/St_Johns", False)
|
|
||||||
tzabbr_register("O", u"Oscar Time Zone", u"Military", "Etc/GMT+2", False)
|
|
||||||
tzabbr_register("P", u"Papa Time Zone", u"Military", "Etc/GMT+3", False)
|
|
||||||
tzabbr_register("PDT", u"Pacific Daylight Time", u"North America",
|
|
||||||
"US/Pacific", True)
|
|
||||||
tzabbr_register("PST", u"Pacific Standard Time", u"North America",
|
|
||||||
"US/Pacific", False)
|
|
||||||
tzabbr_register("Q", u"Quebec Time Zone", u"Military", "Etc/GMT+4", False)
|
|
||||||
tzabbr_register("R", u"Romeo Time Zone", u"Military", "Etc/GMT+5", False)
|
|
||||||
tzabbr_register("S", u"Sierra Time Zone", u"Military", "Etc/GMT+6", False)
|
|
||||||
tzabbr_register("T", u"Tango Time Zone", u"Military", "Etc/GMT+7", False)
|
|
||||||
tzabbr_register("U", u"Uniform Time Zone", u"Military", "Etc/GMT+8", False)
|
|
||||||
tzabbr_register("UTC", u"Coordinated Universal Time", u"Europe",
|
|
||||||
pytz.utc, False)
|
|
||||||
tzabbr_register("V", u"Victor Time Zone", u"Military", "Etc/GMT+9", False)
|
|
||||||
tzabbr_register("W", u"Whiskey Time Zone", u"Military", "Etc/GMT+10", False)
|
|
||||||
tzabbr_register("WDT", u"Western Daylight Time", u"Australia",
|
|
||||||
"Australia/West", True)
|
|
||||||
tzabbr_register("WEDT", u"Western European Daylight Time", u"Europe",
|
|
||||||
"Etc/GMT+1", True)
|
|
||||||
tzabbr_register("WEST", u"Western European Summer Time", u"Europe",
|
|
||||||
"Etc/GMT+1", True)
|
|
||||||
tzabbr_register("WET", u"Western European Time", u"Europe", pytz.utc, False)
|
|
||||||
tzabbr_register("WST", u"Western Standard Time", u"Australia",
|
|
||||||
"Australia/West", False)
|
|
||||||
tzabbr_register("X", u"X-ray Time Zone", u"Military", "Etc/GMT+11", False)
|
|
||||||
tzabbr_register("Y", u"Yankee Time Zone", u"Military", "Etc/GMT+12", False)
|
|
||||||
tzabbr_register("Z", u"Zulu Time Zone", u"Military", pytz.utc, False)
|
|
|
@ -2,19 +2,21 @@ import os
|
||||||
import errno
|
import errno
|
||||||
from math import log
|
from math import log
|
||||||
|
|
||||||
|
|
||||||
def human_size(num):
|
def human_size(num):
|
||||||
"""Human friendly file size"""
|
"""Human friendly file size"""
|
||||||
unit_list = zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'], [0, 0, 1, 2, 2])
|
unit_list = list(zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'],
|
||||||
if num > 1:
|
[0, 0, 1, 2, 2]))
|
||||||
|
if num == 0:
|
||||||
|
return '0 bytes'
|
||||||
|
if num == 1:
|
||||||
|
return '1 byte'
|
||||||
exponent = min(int(log(num, 1024)), len(unit_list) - 1)
|
exponent = min(int(log(num, 1024)), len(unit_list) - 1)
|
||||||
quotient = float(num) / 1024**exponent
|
quotient = float(num) / 1024**exponent
|
||||||
unit, num_decimals = unit_list[exponent]
|
unit, num_decimals = unit_list[exponent]
|
||||||
format_string = '{:.%sf} {}' % (num_decimals)
|
format_string = '{:.%sf} {}' % (num_decimals)
|
||||||
return format_string.format(quotient, unit)
|
return format_string.format(quotient, unit)
|
||||||
if num == 0: # pragma: no cover
|
|
||||||
return '0 bytes'
|
|
||||||
if num == 1: # pragma: no cover
|
|
||||||
return '1 byte'
|
|
||||||
|
|
||||||
def du(path):
|
def du(path):
|
||||||
"""Like du -sb, returns total size of path in bytes. Ignore
|
"""Like du -sb, returns total size of path in bytes. Ignore
|
||||||
|
@ -28,7 +30,7 @@ def du(path):
|
||||||
filepath = os.path.join(path, thisfile)
|
filepath = os.path.join(path, thisfile)
|
||||||
size += du(filepath)
|
size += du(filepath)
|
||||||
return size
|
return size
|
||||||
except OSError as e: # pragma: no cover
|
except OSError as e:
|
||||||
if e.errno != errno.ENOENT:
|
if e.errno != errno.ENOENT:
|
||||||
raise
|
raise
|
||||||
return 0
|
return 0
|
||||||
|
|
|
@ -1,49 +1,20 @@
|
||||||
# Implementation of hole punching via fallocate, if the OS
|
# Implementation of hole punching via fallocate, if the OS
|
||||||
# and filesystem support it.
|
# and filesystem support it.
|
||||||
|
|
||||||
try:
|
import fallocate
|
||||||
import os
|
|
||||||
import ctypes
|
|
||||||
import ctypes.util
|
|
||||||
|
|
||||||
def make_fallocate():
|
|
||||||
libc_name = ctypes.util.find_library('c')
|
|
||||||
libc = ctypes.CDLL(libc_name, use_errno=True)
|
|
||||||
|
|
||||||
_fallocate = libc.fallocate
|
|
||||||
_fallocate.restype = ctypes.c_int
|
|
||||||
_fallocate.argtypes = [ ctypes.c_int, ctypes.c_int,
|
|
||||||
ctypes.c_int64, ctypes.c_int64 ]
|
|
||||||
|
|
||||||
del libc
|
|
||||||
del libc_name
|
|
||||||
|
|
||||||
def fallocate(fd, mode, offset, len_):
|
|
||||||
res = _fallocate(fd, mode, offset, len_)
|
|
||||||
if res != 0: # pragma: no cover
|
|
||||||
errno = ctypes.get_errno()
|
|
||||||
raise IOError(errno, os.strerror(errno))
|
|
||||||
return fallocate
|
|
||||||
|
|
||||||
fallocate = make_fallocate()
|
|
||||||
del make_fallocate
|
|
||||||
except Exception: # pragma: no cover
|
|
||||||
fallocate = None
|
|
||||||
|
|
||||||
FALLOC_FL_KEEP_SIZE = 0x01
|
|
||||||
FALLOC_FL_PUNCH_HOLE = 0x02
|
|
||||||
|
|
||||||
def punch_hole(filename, offset, length, ignore_errors=True):
|
def punch_hole(filename, offset, length, ignore_errors=True):
|
||||||
"""Punch a hole in the file. This isn't well supported, so errors
|
"""Punch a hole in the file. This isn't well supported, so errors
|
||||||
are ignored by default."""
|
are ignored by default."""
|
||||||
try:
|
try:
|
||||||
if fallocate is None: # pragma: no cover
|
|
||||||
raise IOError("fallocate not available")
|
|
||||||
with open(filename, "r+") as f:
|
with open(filename, "r+") as f:
|
||||||
fallocate(f.fileno(),
|
fallocate.fallocate(
|
||||||
FALLOC_FL_KEEP_SIZE | FALLOC_FL_PUNCH_HOLE,
|
f.fileno(),
|
||||||
offset, length)
|
offset,
|
||||||
except IOError: # pragma: no cover
|
length,
|
||||||
|
fallocate.FALLOC_FL_KEEP_SIZE | fallocate.FALLOC_FL_PUNCH_HOLE)
|
||||||
|
except Exception:
|
||||||
if ignore_errors:
|
if ignore_errors:
|
||||||
return
|
return
|
||||||
raise
|
raise
|
||||||
|
|
|
@ -9,10 +9,12 @@ Intervals are half-open, ie. they include data points with timestamps
|
||||||
import nilmdb.utils.time
|
import nilmdb.utils.time
|
||||||
import nilmdb.utils.iterator
|
import nilmdb.utils.iterator
|
||||||
|
|
||||||
|
|
||||||
class IntervalError(Exception):
|
class IntervalError(Exception):
|
||||||
"""Error due to interval overlap, etc"""
|
"""Error due to interval overlap, etc"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
# Interval
|
# Interval
|
||||||
class Interval:
|
class Interval:
|
||||||
"""Represents an interval of time."""
|
"""Represents an interval of time."""
|
||||||
|
@ -22,7 +24,7 @@ class Interval:
|
||||||
'start' and 'end' are arbitrary numbers that represent time
|
'start' and 'end' are arbitrary numbers that represent time
|
||||||
"""
|
"""
|
||||||
if start >= end:
|
if start >= end:
|
||||||
# Explicitly disallow zero-width intervals (since they're half-open)
|
# Explicitly disallow zero-width intervals, since they're half-open
|
||||||
raise IntervalError("start %s must precede end %s" % (start, end))
|
raise IntervalError("start %s must precede end %s" % (start, end))
|
||||||
self.start = start
|
self.start = start
|
||||||
self.end = end
|
self.end = end
|
||||||
|
@ -39,9 +41,24 @@ class Interval:
|
||||||
return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
|
return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
|
||||||
" -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")
|
" -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")
|
||||||
|
|
||||||
def __cmp__(self, other):
|
# Compare two intervals. If non-equal, order by start then end
|
||||||
"""Compare two intervals. If non-equal, order by start then end"""
|
def __lt__(self, other):
|
||||||
return cmp(self.start, other.start) or cmp(self.end, other.end)
|
return (self.start, self.end) < (other.start, other.end)
|
||||||
|
|
||||||
|
def __gt__(self, other):
|
||||||
|
return (self.start, self.end) > (other.start, other.end)
|
||||||
|
|
||||||
|
def __le__(self, other):
|
||||||
|
return (self.start, self.end) <= (other.start, other.end)
|
||||||
|
|
||||||
|
def __ge__(self, other):
|
||||||
|
return (self.start, self.end) >= (other.start, other.end)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return (self.start, self.end) == (other.start, other.end)
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return (self.start, self.end) != (other.start, other.end)
|
||||||
|
|
||||||
def intersects(self, other):
|
def intersects(self, other):
|
||||||
"""Return True if two Interval objects intersect"""
|
"""Return True if two Interval objects intersect"""
|
||||||
|
@ -58,6 +75,7 @@ class Interval:
|
||||||
raise IntervalError("not a subset")
|
raise IntervalError("not a subset")
|
||||||
return Interval(start, end)
|
return Interval(start, end)
|
||||||
|
|
||||||
|
|
||||||
def _interval_math_helper(a, b, op, subset=True):
|
def _interval_math_helper(a, b, op, subset=True):
|
||||||
"""Helper for set_difference, intersection functions,
|
"""Helper for set_difference, intersection functions,
|
||||||
to compute interval subsets based on a math operator on ranges
|
to compute interval subsets based on a math operator on ranges
|
||||||
|
@ -88,7 +106,7 @@ def _interval_math_helper(a, b, op, subset = True):
|
||||||
in_b = True
|
in_b = True
|
||||||
elif k == 2:
|
elif k == 2:
|
||||||
in_a = False
|
in_a = False
|
||||||
elif k == 3:
|
else: # k == 3
|
||||||
in_b = False
|
in_b = False
|
||||||
include = op(in_a, in_b)
|
include = op(in_a, in_b)
|
||||||
if include and out_start is None:
|
if include and out_start is None:
|
||||||
|
@ -101,6 +119,7 @@ def _interval_math_helper(a, b, op, subset = True):
|
||||||
yield Interval(out_start, ts)
|
yield Interval(out_start, ts)
|
||||||
out_start = None
|
out_start = None
|
||||||
|
|
||||||
|
|
||||||
def set_difference(a, b):
|
def set_difference(a, b):
|
||||||
"""
|
"""
|
||||||
Compute the difference (a \\ b) between the intervals in 'a' and
|
Compute the difference (a \\ b) between the intervals in 'a' and
|
||||||
|
@ -115,6 +134,7 @@ def set_difference(a, b):
|
||||||
"""
|
"""
|
||||||
return _interval_math_helper(a, b, (lambda a, b: a and not b))
|
return _interval_math_helper(a, b, (lambda a, b: a and not b))
|
||||||
|
|
||||||
|
|
||||||
def intersection(a, b):
|
def intersection(a, b):
|
||||||
"""
|
"""
|
||||||
Compute the intersection between the intervals in 'a' and the
|
Compute the intersection between the intervals in 'a' and the
|
||||||
|
@ -129,6 +149,7 @@ def intersection(a, b):
|
||||||
"""
|
"""
|
||||||
return _interval_math_helper(a, b, (lambda a, b: a and b))
|
return _interval_math_helper(a, b, (lambda a, b: a and b))
|
||||||
|
|
||||||
|
|
||||||
def optimize(it):
|
def optimize(it):
|
||||||
"""
|
"""
|
||||||
Given an iterable 'it' with intervals, optimize them by joining
|
Given an iterable 'it' with intervals, optimize them by joining
|
||||||
|
|
|
@ -2,6 +2,8 @@
|
||||||
|
|
||||||
# Iterator merging, based on http://code.activestate.com/recipes/491285/
|
# Iterator merging, based on http://code.activestate.com/recipes/491285/
|
||||||
import heapq
|
import heapq
|
||||||
|
|
||||||
|
|
||||||
def imerge(*iterables):
|
def imerge(*iterables):
|
||||||
'''Merge multiple sorted inputs into a single sorted output.
|
'''Merge multiple sorted inputs into a single sorted output.
|
||||||
|
|
||||||
|
@ -17,8 +19,8 @@ def imerge(*iterables):
|
||||||
h_append = h.append
|
h_append = h.append
|
||||||
for it in map(iter, iterables):
|
for it in map(iter, iterables):
|
||||||
try:
|
try:
|
||||||
next = it.next
|
nexter = it.__next__
|
||||||
h_append([next(), next])
|
h_append([nexter(), nexter])
|
||||||
except _Stop:
|
except _Stop:
|
||||||
pass
|
pass
|
||||||
heapq.heapify(h)
|
heapq.heapify(h)
|
||||||
|
@ -26,9 +28,9 @@ def imerge(*iterables):
|
||||||
while 1:
|
while 1:
|
||||||
try:
|
try:
|
||||||
while 1:
|
while 1:
|
||||||
v, next = s = h[0] # raises IndexError when h is empty
|
v, nexter = s = h[0] # raises IndexError when h is empty
|
||||||
yield v
|
yield v
|
||||||
s[0] = next() # raises StopIteration when exhausted
|
s[0] = nexter() # raises StopIteration when exhausted
|
||||||
siftup(h, 0) # restore heap condition
|
siftup(h, 0) # restore heap condition
|
||||||
except _Stop:
|
except _Stop:
|
||||||
heappop(h) # remove empty iterator
|
heappop(h) # remove empty iterator
|
||||||
|
|
|
@ -1,11 +1,9 @@
|
||||||
# File locking
|
# File locking
|
||||||
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
try:
|
|
||||||
import fcntl
|
import fcntl
|
||||||
import errno
|
import errno
|
||||||
|
|
||||||
|
|
||||||
def exclusive_lock(f):
|
def exclusive_lock(f):
|
||||||
"""Acquire an exclusive lock. Returns True on successful
|
"""Acquire an exclusive lock. Returns True on successful
|
||||||
lock, or False on error."""
|
lock, or False on error."""
|
||||||
|
@ -14,20 +12,11 @@ try:
|
||||||
except IOError as e:
|
except IOError as e:
|
||||||
if e.errno in (errno.EACCES, errno.EAGAIN):
|
if e.errno in (errno.EACCES, errno.EAGAIN):
|
||||||
return False
|
return False
|
||||||
else: # pragma: no cover
|
else:
|
||||||
raise
|
raise
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def exclusive_unlock(f):
|
def exclusive_unlock(f):
|
||||||
"""Release an exclusive lock."""
|
"""Release an exclusive lock."""
|
||||||
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
|
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
|
||||||
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
def exclusive_lock(f):
|
|
||||||
"""Dummy lock function -- does not lock!"""
|
|
||||||
warnings.warn("Pretending to lock " + str(f))
|
|
||||||
return True
|
|
||||||
|
|
||||||
def exclusive_unlock(f):
|
|
||||||
"""Release an exclusive lock."""
|
|
||||||
return
|
|
||||||
|
|
|
@ -6,10 +6,11 @@
|
||||||
import collections
|
import collections
|
||||||
import decorator
|
import decorator
|
||||||
|
|
||||||
|
|
||||||
def lru_cache(size=10, onremove=None, keys=slice(None)):
|
def lru_cache(size=10, onremove=None, keys=slice(None)):
|
||||||
"""Least-recently-used cache decorator.
|
"""Least-recently-used cache decorator.
|
||||||
|
|
||||||
@lru_cache(size = 10, onevict = None)
|
@lru_cache(size=10, onremove=None)
|
||||||
def f(...):
|
def f(...):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -53,14 +54,17 @@ def lru_cache(size = 10, onremove = None, keys = slice(None)):
|
||||||
if key in cache:
|
if key in cache:
|
||||||
evict(cache.pop(key))
|
evict(cache.pop(key))
|
||||||
else:
|
else:
|
||||||
if len(cache) > 0 and len(args) != len(cache.iterkeys().next()):
|
if cache:
|
||||||
|
if len(args) != len(next(iter(cache.keys()))):
|
||||||
raise KeyError("trying to remove from LRU cache, but "
|
raise KeyError("trying to remove from LRU cache, but "
|
||||||
"number of arguments doesn't match the "
|
"number of arguments doesn't match the "
|
||||||
"cache key length")
|
"cache key length")
|
||||||
|
|
||||||
def cache_remove_all():
|
def cache_remove_all():
|
||||||
|
nonlocal cache
|
||||||
for key in cache:
|
for key in cache:
|
||||||
evict(cache.pop(key))
|
evict(cache[key])
|
||||||
|
cache = collections.OrderedDict()
|
||||||
|
|
||||||
def cache_info():
|
def cache_info():
|
||||||
return (func.cache_hits, func.cache_misses)
|
return (func.cache_hits, func.cache_misses)
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
from nilmdb.utils.printf import *
|
|
||||||
import sys
|
import sys
|
||||||
import inspect
|
import inspect
|
||||||
import decorator
|
import decorator
|
||||||
|
from nilmdb.utils.printf import fprintf
|
||||||
|
|
||||||
|
|
||||||
def must_close(errorfile=sys.stderr, wrap_verify=False):
|
def must_close(errorfile=sys.stderr, wrap_verify=False):
|
||||||
"""Class decorator that warns on 'errorfile' at deletion time if
|
"""Class decorator that warns on 'errorfile' at deletion time if
|
||||||
|
@ -12,12 +13,17 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
|
||||||
already been called."""
|
already been called."""
|
||||||
def class_decorator(cls):
|
def class_decorator(cls):
|
||||||
|
|
||||||
|
def is_method_or_function(x):
|
||||||
|
return inspect.ismethod(x) or inspect.isfunction(x)
|
||||||
|
|
||||||
def wrap_class_method(wrapper):
|
def wrap_class_method(wrapper):
|
||||||
try:
|
try:
|
||||||
orig = getattr(cls, wrapper.__name__).im_func
|
orig = getattr(cls, wrapper.__name__)
|
||||||
except Exception:
|
except AttributeError:
|
||||||
orig = lambda x: None
|
orig = lambda x: None
|
||||||
setattr(cls, wrapper.__name__, decorator.decorator(wrapper, orig))
|
if is_method_or_function(orig):
|
||||||
|
setattr(cls, wrapper.__name__,
|
||||||
|
decorator.decorator(wrapper, orig))
|
||||||
|
|
||||||
@wrap_class_method
|
@wrap_class_method
|
||||||
def __init__(orig, self, *args, **kwargs):
|
def __init__(orig, self, *args, **kwargs):
|
||||||
|
@ -33,7 +39,7 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
|
||||||
fprintf(errorfile, "error: %s.close() wasn't called!\n",
|
fprintf(errorfile, "error: %s.close() wasn't called!\n",
|
||||||
self.__class__.__name__)
|
self.__class__.__name__)
|
||||||
return orig(self, *args, **kwargs)
|
return orig(self, *args, **kwargs)
|
||||||
except: # pragma: no cover
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@wrap_class_method
|
@wrap_class_method
|
||||||
|
@ -49,16 +55,17 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
|
||||||
raise AssertionError("called " + str(orig) + " after close")
|
raise AssertionError("called " + str(orig) + " after close")
|
||||||
return orig(self, *args, **kwargs)
|
return orig(self, *args, **kwargs)
|
||||||
if wrap_verify:
|
if wrap_verify:
|
||||||
for (name, method) in inspect.getmembers(cls, inspect.ismethod):
|
for (name, method) in inspect.getmembers(cls,
|
||||||
# Skip class methods
|
is_method_or_function):
|
||||||
if method.__self__ is not None:
|
|
||||||
continue
|
|
||||||
# Skip some methods
|
# Skip some methods
|
||||||
if name in ["__del__", "__init__"]:
|
if name in ["__del__", "__init__"]:
|
||||||
continue
|
continue
|
||||||
# Set up wrapper
|
# Set up wrapper
|
||||||
setattr(cls, name, decorator.decorator(verifier,
|
if inspect.ismethod(method):
|
||||||
method.im_func))
|
func = method.__func__
|
||||||
|
else:
|
||||||
|
func = method
|
||||||
|
setattr(cls, name, decorator.decorator(verifier, func))
|
||||||
|
|
||||||
return cls
|
return cls
|
||||||
return class_decorator
|
return class_decorator
|
||||||
|
|
|
@ -1,9 +1,13 @@
|
||||||
"""printf, fprintf, sprintf"""
|
"""printf, fprintf, sprintf"""
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
def printf(_str, *args):
|
def printf(_str, *args):
|
||||||
print(_str % args, end='')
|
print(_str % args, end='')
|
||||||
|
|
||||||
|
|
||||||
def fprintf(_file, _str, *args):
|
def fprintf(_file, _str, *args):
|
||||||
print(_str % args, end='', file=_file)
|
print(_str % args, end='', file=_file)
|
||||||
|
|
||||||
|
|
||||||
def sprintf(_str, *args):
|
def sprintf(_str, *args):
|
||||||
return (_str % args)
|
return (_str % args)
|
||||||
|
|
|
@ -1,10 +1,6 @@
|
||||||
import Queue
|
import queue
|
||||||
import threading
|
import threading
|
||||||
import sys
|
import sys
|
||||||
import decorator
|
|
||||||
import inspect
|
|
||||||
import types
|
|
||||||
import functools
|
|
||||||
|
|
||||||
# This file provides a class that will wrap an object and serialize
|
# This file provides a class that will wrap an object and serialize
|
||||||
# all calls to its methods. All calls to that object will be queued
|
# all calls to its methods. All calls to that object will be queued
|
||||||
|
@ -13,6 +9,7 @@ import functools
|
||||||
|
|
||||||
# Based partially on http://stackoverflow.com/questions/2642515/
|
# Based partially on http://stackoverflow.com/questions/2642515/
|
||||||
|
|
||||||
|
|
||||||
class SerializerThread(threading.Thread):
|
class SerializerThread(threading.Thread):
|
||||||
"""Thread that retrieves call information from the queue, makes the
|
"""Thread that retrieves call information from the queue, makes the
|
||||||
call, and returns the results."""
|
call, and returns the results."""
|
||||||
|
@ -40,6 +37,7 @@ class SerializerThread(threading.Thread):
|
||||||
result_queue.put((exception, result))
|
result_queue.put((exception, result))
|
||||||
del exception, result
|
del exception, result
|
||||||
|
|
||||||
|
|
||||||
def serializer_proxy(obj_or_type):
|
def serializer_proxy(obj_or_type):
|
||||||
"""Wrap the given object or type in a SerializerObjectProxy.
|
"""Wrap the given object or type in a SerializerObjectProxy.
|
||||||
|
|
||||||
|
@ -49,45 +47,54 @@ def serializer_proxy(obj_or_type):
|
||||||
The proxied requests, including instantiation, are performed in a
|
The proxied requests, including instantiation, are performed in a
|
||||||
single thread and serialized between caller threads.
|
single thread and serialized between caller threads.
|
||||||
"""
|
"""
|
||||||
class SerializerCallProxy(object):
|
class SerializerCallProxy():
|
||||||
def __init__(self, call_queue, func, objectproxy):
|
def __init__(self, call_queue, func, objectproxy):
|
||||||
self.call_queue = call_queue
|
self.call_queue = call_queue
|
||||||
self.func = func
|
self.func = func
|
||||||
# Need to hold a reference to object proxy so it doesn't
|
# Need to hold a reference to object proxy so it doesn't
|
||||||
# go away (and kill the thread) until after get called.
|
# go away (and kill the thread) until after get called.
|
||||||
self.objectproxy = objectproxy
|
self.objectproxy = objectproxy
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
def __call__(self, *args, **kwargs):
|
||||||
result_queue = Queue.Queue()
|
result_queue = queue.Queue()
|
||||||
self.call_queue.put((result_queue, self.func, args, kwargs))
|
self.call_queue.put((result_queue, self.func, args, kwargs))
|
||||||
(exc_info, result) = result_queue.get()
|
(exc_info, result) = result_queue.get()
|
||||||
if exc_info is None:
|
if exc_info is None:
|
||||||
return result
|
return result
|
||||||
else:
|
else:
|
||||||
raise exc_info[0], exc_info[1], exc_info[2]
|
raise exc_info[1].with_traceback(exc_info[2])
|
||||||
|
|
||||||
class SerializerObjectProxy(object):
|
class SerializerObjectProxy():
|
||||||
def __init__(self, obj_or_type, *args, **kwargs):
|
def __init__(self, obj_or_type, *args, **kwargs):
|
||||||
self.__object = obj_or_type
|
self.__object = obj_or_type
|
||||||
try:
|
if isinstance(obj_or_type, type):
|
||||||
if type(obj_or_type) in (types.TypeType, types.ClassType):
|
|
||||||
classname = obj_or_type.__name__
|
classname = obj_or_type.__name__
|
||||||
else:
|
else:
|
||||||
classname = obj_or_type.__class__.__name__
|
classname = obj_or_type.__class__.__name__
|
||||||
except AttributeError: # pragma: no cover
|
self.__call_queue = queue.Queue()
|
||||||
classname = "???"
|
|
||||||
self.__call_queue = Queue.Queue()
|
|
||||||
self.__thread = SerializerThread(classname, self.__call_queue)
|
self.__thread = SerializerThread(classname, self.__call_queue)
|
||||||
self.__thread.daemon = True
|
self.__thread.daemon = True
|
||||||
self.__thread.start()
|
self.__thread.start()
|
||||||
self._thread_safe = True
|
self._thread_safe = True
|
||||||
|
|
||||||
def __getattr__(self, key):
|
def __getattr__(self, key):
|
||||||
if key.startswith("_SerializerObjectProxy__"): # pragma: no cover
|
# If the attribute is a function, we want to return a
|
||||||
raise AttributeError
|
# proxy that will perform the call through the serializer
|
||||||
|
# when called. Otherwise, we want to return the value
|
||||||
|
# directly. This means we need to grab the attribute once,
|
||||||
|
# and therefore self.__object.__getattr__ may be called
|
||||||
|
# in an unsafe way, from the caller's thread.
|
||||||
attr = getattr(self.__object, key)
|
attr = getattr(self.__object, key)
|
||||||
if not callable(attr):
|
if not callable(attr):
|
||||||
|
# It's not callable, so perform the getattr from within
|
||||||
|
# the serializer thread, then return its value.
|
||||||
|
# That may differ from the "attr" value we just grabbed
|
||||||
|
# from here, due to forced ordering in the serializer.
|
||||||
getter = SerializerCallProxy(self.__call_queue, getattr, self)
|
getter = SerializerCallProxy(self.__call_queue, getattr, self)
|
||||||
return getter(self.__object, key)
|
return getter(self.__object, key)
|
||||||
|
else:
|
||||||
|
# It is callable, so return an object that will proxy through
|
||||||
|
# the serializer when called.
|
||||||
r = SerializerCallProxy(self.__call_queue, attr, self)
|
r = SerializerCallProxy(self.__call_queue, attr, self)
|
||||||
return r
|
return r
|
||||||
|
|
||||||
|
@ -98,9 +105,10 @@ def serializer_proxy(obj_or_type):
|
||||||
attr = getattr(self.__object, "__iter__")
|
attr = getattr(self.__object, "__iter__")
|
||||||
self.__iter = SerializerCallProxy(self.__call_queue, attr, self)()
|
self.__iter = SerializerCallProxy(self.__call_queue, attr, self)()
|
||||||
return self
|
return self
|
||||||
def next(self):
|
|
||||||
|
def __next__(self):
|
||||||
return SerializerCallProxy(self.__call_queue,
|
return SerializerCallProxy(self.__call_queue,
|
||||||
self.__iter.next, self)()
|
self.__iter.__next__, self)()
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
return self.__getattr__("__getitem__")(key)
|
return self.__getattr__("__getitem__")(key)
|
||||||
|
@ -110,7 +118,7 @@ def serializer_proxy(obj_or_type):
|
||||||
to serializer_proxy. Otherwise, pass the call through."""
|
to serializer_proxy. Otherwise, pass the call through."""
|
||||||
ret = SerializerCallProxy(self.__call_queue,
|
ret = SerializerCallProxy(self.__call_queue,
|
||||||
self.__object, self)(*args, **kwargs)
|
self.__object, self)(*args, **kwargs)
|
||||||
if type(self.__object) in (types.TypeType, types.ClassType):
|
if isinstance(self.__object, type):
|
||||||
# Instantiation
|
# Instantiation
|
||||||
self.__object = ret
|
self.__object = ret
|
||||||
return self
|
return self
|
||||||
|
@ -118,9 +126,9 @@ def serializer_proxy(obj_or_type):
|
||||||
|
|
||||||
def __del__(self):
|
def __del__(self):
|
||||||
try:
|
try:
|
||||||
|
# Signal thread to exit, but don't wait for it.
|
||||||
self.__call_queue.put((None, None, None, None))
|
self.__call_queue.put((None, None, None, None))
|
||||||
self.__thread.join()
|
except:
|
||||||
except: # pragma: no cover
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return SerializerObjectProxy(obj_or_type)
|
return SerializerObjectProxy(obj_or_type)
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
|
||||||
def sort_human(items, key=None):
|
def sort_human(items, key=None):
|
||||||
"""Human-friendly sort (/stream/2 before /stream/10)"""
|
"""Human-friendly sort (/stream/2 before /stream/10)"""
|
||||||
def to_num(val):
|
def to_num(val):
|
||||||
|
|
|
@ -1,26 +1,25 @@
|
||||||
from nilmdb.utils.printf import *
|
|
||||||
import threading
|
import threading
|
||||||
import warnings
|
from nilmdb.utils.printf import sprintf
|
||||||
import types
|
|
||||||
|
|
||||||
def verify_proxy(obj_or_type, exception = False, check_thread = True,
|
|
||||||
|
def verify_proxy(obj_or_type, check_thread=True,
|
||||||
check_concurrent=True):
|
check_concurrent=True):
|
||||||
"""Wrap the given object or type in a VerifyObjectProxy.
|
"""Wrap the given object or type in a VerifyObjectProxy.
|
||||||
|
|
||||||
Returns a VerifyObjectProxy that proxies all method calls to the
|
Returns a VerifyObjectProxy that proxies all method calls to the
|
||||||
given object, as well as attribute retrievals.
|
given object, as well as attribute retrievals.
|
||||||
|
|
||||||
When calling methods, the following checks are performed. If
|
When calling methods, the following checks are performed. On
|
||||||
exception is True, an exception is raised. Otherwise, a warning
|
failure, an exception is raised.
|
||||||
is printed.
|
|
||||||
|
|
||||||
check_thread = True # Warn/fail if two different threads call methods.
|
check_thread = True # Fail if two different threads call methods.
|
||||||
check_concurrent = True # Warn/fail if two functions are concurrently
|
check_concurrent = True # Fail if two functions are concurrently
|
||||||
# run through this proxy
|
# run through this proxy
|
||||||
"""
|
"""
|
||||||
class Namespace(object):
|
class Namespace():
|
||||||
pass
|
pass
|
||||||
class VerifyCallProxy(object):
|
|
||||||
|
class VerifyCallProxy():
|
||||||
def __init__(self, func, parent_namespace):
|
def __init__(self, func, parent_namespace):
|
||||||
self.func = func
|
self.func = func
|
||||||
self.parent_namespace = parent_namespace
|
self.parent_namespace = parent_namespace
|
||||||
|
@ -42,22 +41,16 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
|
||||||
" but %s called %s.%s",
|
" but %s called %s.%s",
|
||||||
p.thread.name, p.classname, p.thread_callee,
|
p.thread.name, p.classname, p.thread_callee,
|
||||||
this.name, p.classname, callee)
|
this.name, p.classname, callee)
|
||||||
if exception:
|
|
||||||
raise AssertionError(err)
|
raise AssertionError(err)
|
||||||
else: # pragma: no cover
|
|
||||||
warnings.warn(err)
|
|
||||||
|
|
||||||
need_concur_unlock = False
|
need_concur_unlock = False
|
||||||
if check_concurrent:
|
if check_concurrent:
|
||||||
if p.concur_lock.acquire(False) == False:
|
if not p.concur_lock.acquire(False):
|
||||||
err = sprintf("unsafe concurrency: %s called %s.%s "
|
err = sprintf("unsafe concurrency: %s called %s.%s "
|
||||||
"while %s is still in %s.%s",
|
"while %s is still in %s.%s",
|
||||||
this.name, p.classname, callee,
|
this.name, p.classname, callee,
|
||||||
p.concur_tname, p.classname, p.concur_callee)
|
p.concur_tname, p.classname, p.concur_callee)
|
||||||
if exception:
|
|
||||||
raise AssertionError(err)
|
raise AssertionError(err)
|
||||||
else: # pragma: no cover
|
|
||||||
warnings.warn(err)
|
|
||||||
else:
|
else:
|
||||||
p.concur_tname = this.name
|
p.concur_tname = this.name
|
||||||
p.concur_callee = callee
|
p.concur_callee = callee
|
||||||
|
@ -70,7 +63,7 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
|
||||||
p.concur_lock.release()
|
p.concur_lock.release()
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
class VerifyObjectProxy(object):
|
class VerifyObjectProxy():
|
||||||
def __init__(self, obj_or_type, *args, **kwargs):
|
def __init__(self, obj_or_type, *args, **kwargs):
|
||||||
p = Namespace()
|
p = Namespace()
|
||||||
self.__ns = p
|
self.__ns = p
|
||||||
|
@ -80,17 +73,12 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
|
||||||
p.concur_tname = None
|
p.concur_tname = None
|
||||||
p.concur_callee = None
|
p.concur_callee = None
|
||||||
self.__obj = obj_or_type
|
self.__obj = obj_or_type
|
||||||
try:
|
if isinstance(obj_or_type, type):
|
||||||
if type(obj_or_type) in (types.TypeType, types.ClassType):
|
|
||||||
p.classname = self.__obj.__name__
|
p.classname = self.__obj.__name__
|
||||||
else:
|
else:
|
||||||
p.classname = self.__obj.__class__.__name__
|
p.classname = self.__obj.__class__.__name__
|
||||||
except AttributeError: # pragma: no cover
|
|
||||||
p.classname = "???"
|
|
||||||
|
|
||||||
def __getattr__(self, key):
|
def __getattr__(self, key):
|
||||||
if key.startswith("_VerifyObjectProxy__"): # pragma: no cover
|
|
||||||
raise AttributeError
|
|
||||||
attr = getattr(self.__obj, key)
|
attr = getattr(self.__obj, key)
|
||||||
if not callable(attr):
|
if not callable(attr):
|
||||||
return VerifyCallProxy(getattr, self.__ns)(self.__obj, key)
|
return VerifyCallProxy(getattr, self.__ns)(self.__obj, key)
|
||||||
|
@ -100,7 +88,7 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
|
||||||
"""Call this to instantiate the type, if a type was passed
|
"""Call this to instantiate the type, if a type was passed
|
||||||
to verify_proxy. Otherwise, pass the call through."""
|
to verify_proxy. Otherwise, pass the call through."""
|
||||||
ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs)
|
ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs)
|
||||||
if type(self.__obj) in (types.TypeType, types.ClassType):
|
if isinstance(self.__obj, type):
|
||||||
# Instantiation
|
# Instantiation
|
||||||
self.__obj = ret
|
self.__obj = ret
|
||||||
return self
|
return self
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
from nilmdb.utils import datetime_tz
|
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
|
import datetime_tz
|
||||||
|
|
||||||
# Range
|
# Range
|
||||||
min_timestamp = (-2**63)
|
min_timestamp = (-2**63)
|
||||||
|
@ -11,15 +9,17 @@ max_timestamp = (2**63 - 1)
|
||||||
# Smallest representable step
|
# Smallest representable step
|
||||||
epsilon = 1
|
epsilon = 1
|
||||||
|
|
||||||
def string_to_timestamp(str):
|
|
||||||
|
def string_to_timestamp(string):
|
||||||
"""Convert a string that represents an integer number of microseconds
|
"""Convert a string that represents an integer number of microseconds
|
||||||
since epoch."""
|
since epoch."""
|
||||||
try:
|
try:
|
||||||
# Parse a string like "1234567890123456" and return an integer
|
# Parse a string like "1234567890123456" and return an integer
|
||||||
return int(str)
|
return int(string)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
# Try parsing as a float, in case it's "1234567890123456.0"
|
# Try parsing as a float, in case it's "1234567890123456.0"
|
||||||
return int(round(float(str)))
|
return int(round(float(string)))
|
||||||
|
|
||||||
|
|
||||||
def timestamp_to_string(timestamp):
|
def timestamp_to_string(timestamp):
|
||||||
"""Convert a timestamp (integer microseconds since epoch) to a string"""
|
"""Convert a timestamp (integer microseconds since epoch) to a string"""
|
||||||
|
@ -28,6 +28,13 @@ def timestamp_to_string(timestamp):
|
||||||
else:
|
else:
|
||||||
return str(timestamp)
|
return str(timestamp)
|
||||||
|
|
||||||
|
|
||||||
|
def timestamp_to_bytes(timestamp):
|
||||||
|
"""Convert a timestamp (integer microseconds since epoch) to a Python
|
||||||
|
bytes object"""
|
||||||
|
return timestamp_to_string(timestamp).encode('utf-8')
|
||||||
|
|
||||||
|
|
||||||
def timestamp_to_human(timestamp):
|
def timestamp_to_human(timestamp):
|
||||||
"""Convert a timestamp (integer microseconds since epoch) to a
|
"""Convert a timestamp (integer microseconds since epoch) to a
|
||||||
human-readable string, using the local timezone for display
|
human-readable string, using the local timezone for display
|
||||||
|
@ -39,24 +46,30 @@ def timestamp_to_human(timestamp):
|
||||||
dt = datetime_tz.datetime_tz.fromtimestamp(timestamp_to_unix(timestamp))
|
dt = datetime_tz.datetime_tz.fromtimestamp(timestamp_to_unix(timestamp))
|
||||||
return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
|
return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
|
||||||
|
|
||||||
|
|
||||||
def unix_to_timestamp(unix):
|
def unix_to_timestamp(unix):
|
||||||
"""Convert a Unix timestamp (floating point seconds since epoch)
|
"""Convert a Unix timestamp (floating point seconds since epoch)
|
||||||
into a NILM timestamp (integer microseconds since epoch)"""
|
into a NILM timestamp (integer microseconds since epoch)"""
|
||||||
return int(round(unix * 1e6))
|
return int(round(unix * 1e6))
|
||||||
seconds_to_timestamp = unix_to_timestamp
|
|
||||||
|
|
||||||
def timestamp_to_unix(timestamp):
|
def timestamp_to_unix(timestamp):
|
||||||
"""Convert a NILM timestamp (integer microseconds since epoch)
|
"""Convert a NILM timestamp (integer microseconds since epoch)
|
||||||
into a Unix timestamp (floating point seconds since epoch)"""
|
into a Unix timestamp (floating point seconds since epoch)"""
|
||||||
return timestamp / 1e6
|
return timestamp / 1e6
|
||||||
|
|
||||||
|
|
||||||
|
seconds_to_timestamp = unix_to_timestamp
|
||||||
timestamp_to_seconds = timestamp_to_unix
|
timestamp_to_seconds = timestamp_to_unix
|
||||||
|
|
||||||
|
|
||||||
def rate_to_period(hz, cycles=1):
|
def rate_to_period(hz, cycles=1):
|
||||||
"""Convert a rate (in Hz) to a period (in timestamp units).
|
"""Convert a rate (in Hz) to a period (in timestamp units).
|
||||||
Returns an integer."""
|
Returns an integer."""
|
||||||
period = unix_to_timestamp(cycles) / float(hz)
|
period = unix_to_timestamp(cycles) / float(hz)
|
||||||
return int(round(period))
|
return int(round(period))
|
||||||
|
|
||||||
|
|
||||||
def parse_time(toparse):
|
def parse_time(toparse):
|
||||||
"""
|
"""
|
||||||
Parse a free-form time string and return a nilmdb timestamp
|
Parse a free-form time string and return a nilmdb timestamp
|
||||||
|
@ -95,9 +108,9 @@ def parse_time(toparse):
|
||||||
try:
|
try:
|
||||||
val = float(toparse)
|
val = float(toparse)
|
||||||
# range is from about year 2001 - 2128
|
# range is from about year 2001 - 2128
|
||||||
if val > 1e9 and val < 5e9:
|
if 1e9 < val < 5e9:
|
||||||
return unix_to_timestamp(val)
|
return unix_to_timestamp(val)
|
||||||
if val > 1e15 and val < 5e15:
|
if 1e15 < val < 5e15:
|
||||||
return val
|
return val
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
|
@ -129,6 +142,7 @@ def parse_time(toparse):
|
||||||
# just give up for now.
|
# just give up for now.
|
||||||
raise ValueError("unable to parse timestamp")
|
raise ValueError("unable to parse timestamp")
|
||||||
|
|
||||||
|
|
||||||
def now():
|
def now():
|
||||||
"""Return current timestamp"""
|
"""Return current timestamp"""
|
||||||
return unix_to_timestamp(time.time())
|
return unix_to_timestamp(time.time())
|
||||||
|
|
|
@ -5,18 +5,17 @@
|
||||||
# with nilmdb.utils.Timer("flush"):
|
# with nilmdb.utils.Timer("flush"):
|
||||||
# foo.flush()
|
# foo.flush()
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def Timer(name=None, tosyslog=False):
|
def Timer(name=None, tosyslog=False):
|
||||||
start = time.time()
|
start = time.time()
|
||||||
yield
|
yield
|
||||||
elapsed = int((time.time() - start) * 1000)
|
elapsed = int((time.time() - start) * 1000)
|
||||||
msg = (name or 'elapsed') + ": " + str(elapsed) + " ms"
|
msg = (name or 'elapsed') + ": " + str(elapsed) + " ms"
|
||||||
if tosyslog: # pragma: no cover
|
if tosyslog:
|
||||||
import syslog
|
import syslog
|
||||||
syslog.syslog(msg)
|
syslog.syslog(msg)
|
||||||
else:
|
else:
|
||||||
|
|
|
@@ -1,16 +1,17 @@
 """File-like objects that add timestamps to the input lines"""

-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import sprintf
 import nilmdb.utils.time

-class Timestamper(object):
+
+class Timestamper():
     """A file-like object that adds timestamps to lines of an input file."""
     def __init__(self, infile, ts_iter):
         """file: filename, or another file-like object
         ts_iter: iterator that returns a timestamp string for
         each line of the file"""
-        if isinstance(infile, basestring):
-            self.file = open(infile, "r")
+        if isinstance(infile, str):
+            self.file = open(infile, "rb")
         else:
             self.file = infile
         self.ts_iter = ts_iter
@@ -22,17 +23,19 @@ class Timestamper(object):
         while True:
             line = self.file.readline(*args)
             if not line:
-                return ""
+                return b""
-            if line[0] == '#':
+            if line[0:1] == b'#':
                 continue
-            break
+            # For some reason, coverage on python 3.8 reports that
+            # we never hit this break, even though we definitely do.
+            break  # pragma: no cover
         try:
-            return self.ts_iter.next() + line
+            return next(self.ts_iter) + line
         except StopIteration:
-            return ""
+            return b""

     def readlines(self, size=None):
-        out = ""
+        out = b""
         while True:
             line = self.readline()
             out += line
@@ -43,12 +46,13 @@ class Timestamper(object):
     def __iter__(self):
         return self

-    def next(self):
+    def __next__(self):
         result = self.readline()
         if not result:
             raise StopIteration
         return result


 class TimestamperRate(Timestamper):
     """Timestamper that uses a start time and a fixed rate"""
     def __init__(self, infile, start, rate, end=None):
@@ -61,33 +65,39 @@ class TimestamperRate(Timestamper):

         end: If specified, raise StopIteration before outputting a value
         greater than this."""
-        timestamp_to_string = nilmdb.utils.time.timestamp_to_string
+        timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
         rate_to_period = nilmdb.utils.time.rate_to_period

         def iterator(start, rate, end):
             n = 0
             rate = float(rate)
             while True:
                 now = start + rate_to_period(rate, n)
                 if end and now >= end:
-                    raise StopIteration
+                    return
-                yield timestamp_to_string(now) + " "
+                yield timestamp_to_bytes(now) + b" "
                 n += 1
         Timestamper.__init__(self, infile, iterator(start, rate, end))
         self.start = start
         self.rate = rate

     def __str__(self):
         return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)",
                        nilmdb.utils.time.timestamp_to_human(self.start),
                        self.rate)


 class TimestamperNow(Timestamper):
     """Timestamper that uses current time"""
     def __init__(self, infile):
-        timestamp_to_string = nilmdb.utils.time.timestamp_to_string
+        timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
         get_now = nilmdb.utils.time.now

         def iterator():
             while True:
-                yield timestamp_to_string(get_now()) + " "
+                yield timestamp_to_bytes(get_now()) + b" "

         Timestamper.__init__(self, infile, iterator())

     def __str__(self):
         return "TimestamperNow(...)"
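The net effect of these hunks is that timestampers now work on bytes end to end: files are opened "rb", comment detection slices line[0:1] (indexing a bytes object in Python 3 yields an int, so a one-byte slice is needed to compare against b'#'), and the yielded prefixes are bytes. A usage sketch follows; the module path and the exact digits produced by timestamp_to_bytes are assumptions, not confirmed by this excerpt.

    import io
    from nilmdb.utils.timestamper import TimestamperRate  # module path assumed

    # Two data lines at 8000 Hz, starting at timestamp 1e15 (microseconds).
    raw = io.BytesIO(b"# header comment\n1.0 2.0\n3.0 4.0\n")
    ts = TimestamperRate(raw, start=1000000000000000, rate=8000)

    for line in ts:
        print(line)
    # b'1000000000000000 1.0 2.0\n'
    # b'1000000000000125 3.0 4.0\n'   (1e6 / 8000 = 125 us per sample)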
@@ -1,29 +0,0 @@
-import sys
-
-if sys.version_info[0] >= 3:  # pragma: no cover (future Python3 compat)
-    text_type = str
-else:
-    text_type = unicode
-
-def encode(u):
-    """Try to encode something from Unicode to a string using the
-    default encoding.  If it fails, try encoding as UTF-8."""
-    if not isinstance(u, text_type):
-        return u
-    try:
-        return u.encode()
-    except UnicodeEncodeError:
-        return u.encode("utf-8")
-
-def decode(s):
-    """Try to decode someting from string to Unicode using the
-    default encoding.  If it fails, try decoding as UTF-8."""
-    if isinstance(s, text_type):
-        return s
-    try:
-        return s.decode()
-    except UnicodeDecodeError:
-        try:
-            return s.decode("utf-8")
-        except UnicodeDecodeError:
-            return s  # best we can do
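These deleted helpers papered over Python 2's implicit codec choices. A rough Python 3 equivalent of what callers do instead (illustrative only, not code from this commit):

    # Python 3 replacement pattern for the removed encode()/decode() helpers:
    # pick the codec and the error policy explicitly at each bytes/str boundary.
    data = "resolution: 125 µs".encode("utf-8")      # str -> bytes
    text = data.decode("utf-8", errors="replace")    # bytes -> str, never raises
    assert text == "resolution: 125 µs"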
41	requirements.txt	Normal file
@@ -0,0 +1,41 @@
+argcomplete==1.12.0
+CherryPy==18.6.0
+coverage==5.2.1
+Cython==0.29.21
+decorator==4.4.2
+fallocate==1.6.4
+flake8==3.8.3
+nose==1.3.7
+numpy==1.19.1
+progressbar==2.5
+psutil==5.7.2
+python-datetime-tz==0.5.4
+python-dateutil==2.8.1
+requests==2.24.0
+tz==0.2.2
+yappi==1.2.5
+
+## The following requirements were added by pip freeze:
+beautifulsoup4==4.9.1
+certifi==2020.6.20
+chardet==3.0.4
+cheroot==8.4.2
+idna==2.10
+jaraco.classes==3.1.0
+jaraco.collections==3.0.0
+jaraco.functools==3.0.1
+jaraco.text==3.2.0
+mccabe==0.6.1
+more-itertools==8.4.0
+portend==2.6
+pycodestyle==2.6.0
+pyflakes==2.2.0
+pytz==2020.1
+six==1.15.0
+soupsieve==2.0.1
+tempora==4.0.0
+urllib3==1.25.10
+waitress==1.4.4
+WebOb==1.8.6
+WebTest==2.0.35
+zc.lockfile==2.0
22	setup.cfg
@@ -13,8 +13,6 @@ cover-package=nilmdb
 cover-erase=1
 # this works, puts html output in cover/ dir:
 # cover-html=1
-# need nose 1.1.3 for this:
-# cover-branches=1
 #debug=nose
 #debug-log=nose.log
 stop=1
@@ -39,3 +37,23 @@ tests=tests
 #with-profile=1
 #profile-sort=time
 ##profile-restrict=10 # doesn't work right, treated as string or something
+
+[versioneer]
+VCS=git
+style=pep440
+versionfile_source=nilmdb/_version.py
+versionfile_build=nilmdb/_version.py
+tag_prefix=nilmdb-
+parentdir_prefix=nilmdb-
+
+[flake8]
+exclude=_version.py
+extend-ignore=E731
+per-file-ignores=__init__.py:F401,E402 \
+    serializer.py:E722 \
+    mustclose.py:E722 \
+    fsck.py:E266
+
+[pylint]
+ignore=_version.py
+disable=C0103,C0111,R0913,R0914
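For context, the new [flake8] table works through flake8's standard setup.cfg support: extend-ignore adds E731 (assigning a lambda) to the default ignore list, and per-file-ignores relaxes codes only where named. A hypothetical __init__.py showing why F401 and E402 are suppressed there:

    # hypothetical nilmdb/__init__.py: re-exports trigger F401 ("imported
    # but unused") and imports placed below other statements trigger E402;
    # both are suppressed for __init__.py by the per-file-ignores entry above.
    from nilmdb._version import get_versions
    __version__ = get_versions()['version']
    import nilmdb.client   # re-export for callers; unused here, hence F401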
98	setup.py
@@ -1,119 +1,51 @@
-#!/usr/bin/python
+#!/usr/bin/env python3

 # To release a new version, tag it:
 #   git tag -a nilmdb-1.1 -m "Version 1.1"
 #   git push --tags
 # Then just package it up:
-#   python setup.py sdist
+#   python3 setup.py sdist

-import traceback
 import sys
 import os
-try:
-    from setuptools import setup, find_packages
-    from distutils.extension import Extension
-    import distutils.version
-except ImportError:
-    traceback.print_exc()
-    print "Please install the prerequisites listed in README.txt"
-    sys.exit(1)
+from setuptools import setup
+from distutils.extension import Extension

 # Versioneer manages version numbers from git tags.
 # https://github.com/warner/python-versioneer
 import versioneer
-versioneer.versionfile_source = 'nilmdb/_version.py'
-versioneer.versionfile_build = 'nilmdb/_version.py'
-versioneer.tag_prefix = 'nilmdb-'
-versioneer.parentdir_prefix = 'nilmdb-'

-# Hack to workaround logging/multiprocessing issue:
-# https://groups.google.com/d/msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
-try: import multiprocessing
-except Exception: pass
+# External modules that need to be built
+ext_modules = [ Extension('nilmdb.server.rocket', ['nilmdb/server/rocket.c' ]) ]

-# Use Cython if it's new enough, otherwise use preexisting C files.
-cython_modules = [ 'nilmdb.server.interval',
-                   'nilmdb.server.rbtree' ]
-try:
-    import Cython
-    from Cython.Build import cythonize
-    if (distutils.version.LooseVersion(Cython.__version__) <
-        distutils.version.LooseVersion("0.16")):
-        print "Cython version", Cython.__version__, "is too old; not using it."
-        raise ImportError()
-    use_cython = True
-except ImportError:
-    use_cython = False
-
-ext_modules = [ Extension('nilmdb.server.rocket', ['nilmdb/server/rocket.c' ]) ]
+# Use Cython.
+cython_modules = [ 'nilmdb.server.interval', 'nilmdb.server.rbtree' ]
+import Cython
+from Cython.Build import cythonize

 for modulename in cython_modules:
     filename = modulename.replace('.','/')
-    if use_cython:
-        ext_modules.extend(cythonize(filename + ".pyx"))
-    else:
-        cfile = filename + ".c"
-        if not os.path.exists(cfile):
-            raise Exception("Missing source file " + cfile + ". "
-                            "Try installing cython >= 0.16.")
-        ext_modules.append(Extension(modulename, [ cfile ]))
+    ext_modules.extend(cythonize(filename + ".pyx"))

-# We need a MANIFEST.in. Generate it here rather than polluting the
-# repository with yet another setup-related file.
-with open("MANIFEST.in", "w") as m:
-    m.write("""
-# Root
-include README.txt
-include setup.cfg
-include setup.py
-include versioneer.py
-include Makefile
-include .coveragerc
-include .pylintrc
-
-# Cython files -- include source.
-recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
-
-# Tests
-recursive-include tests *.py
-recursive-include tests/data *
-include tests/test.order
-
-# Docs
-recursive-include docs Makefile *.md
-
-# Extras
-recursive-include extras *
-""")
+# Get list of requirements to use in `install_requires` below. Note
+# that we don't make a distinction between things that are actually
+# required for end-users vs developers (or use `test_requires` or
+# anything else) -- just install everything for simplicity.
+install_requires = open('requirements.txt').readlines()

 # Run setup
 setup(name='nilmdb',
       version = versioneer.get_version(),
       cmdclass = versioneer.get_cmdclass(),
-      url = 'https://git.jim.sh/jim/lees/nilmdb.git',
+      url = 'https://git.jim.sh/nilm/nilmdb.git',
       author = 'Jim Paris',
       description = "NILM Database",
       long_description = "NILM Database",
       license = "Proprietary",
       author_email = 'jim@jtan.com',
-      tests_require = [ 'nose',
-                        'coverage',
-                        'numpy',
-                        ],
-      setup_requires = [ 'setuptools',
-                         ],
-      install_requires = [ 'decorator',
-                           'cherrypy >= 3.2',
-                           'simplejson',
-                           'python-dateutil',
-                           'pytz',
-                           'psutil >= 0.3.0',
-                           'requests >= 1.1.0',
-                           'progressbar >= 2.2',
-                           ],
+      setup_requires = [ 'setuptools' ],
+      install_requires = install_requires,
       packages = [ 'nilmdb',
                    'nilmdb.utils',
-                   'nilmdb.utils.datetime_tz',
                    'nilmdb.server',
                    'nilmdb.client',
                    'nilmdb.cmdline',
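The Cython section now builds unconditionally: the loop just maps each dotted module name to its .pyx source and hands it to cythonize. A standalone sketch of that name-to-path step (assuming Cython is installed):

    from Cython.Build import cythonize

    cython_modules = ['nilmdb.server.interval', 'nilmdb.server.rbtree']

    ext_modules = []
    for modulename in cython_modules:
        # 'nilmdb.server.interval' -> 'nilmdb/server/interval.pyx'
        filename = modulename.replace('.', '/')
        ext_modules.extend(cythonize(filename + ".pyx"))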
@@ -1,4 +1,4 @@
-# comments are cool?
+# comments are cool? what if they contain →UNICODE← or invalid utf-8 like Ã(
 2.66568e+05 2.24029e+05 5.16140e+03 2.52517e+03 8.35084e+03 3.72470e+03 1.35534e+03 2.03900e+03
 2.57914e+05 2.27183e+05 4.30368e+03 4.13080e+03 7.25535e+03 4.89047e+03 1.63859e+03 1.93496e+03
 2.51717e+05 2.26047e+05 5.99445e+03 3.49363e+03 8.07250e+03 5.08267e+03 2.26917e+03 2.86231e+03
BIN	tests/fsck-data/test1/data.sql	Normal file	(Binary file not shown.)
1	tests/fsck-data/test1/data/git-empty-dir-placeholder	Normal file
@@ -0,0 +1 @@
+hi
1	tests/fsck-data/test1a/data/git-empty-dir-placeholder	Normal file
@@ -0,0 +1 @@
+hi
BIN	tests/fsck-data/test1b/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test1c/data.sql	Normal file	(Binary file not shown.)
1	tests/fsck-data/test1c/data/git-empty-dir-placeholder	Normal file
@@ -0,0 +1 @@
+hi
BIN	tests/fsck-data/test2/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2a/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2a/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2a/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2b/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2b/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2b/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2c/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2c/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2c/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2d/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2d/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2d/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2e/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2e/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2e/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2f/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2f/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2f/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2g/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2g/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2g/data/a/b/_format	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2h/data.sql	Normal file	(Binary file not shown.)
1	tests/fsck-data/test2h/data/git-empty-dir-placeholder	Normal file
@@ -0,0 +1 @@
+hi
BIN	tests/fsck-data/test2i/data.sql	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2i/data/a/b/0000/0000	Normal file	(Binary file not shown.)
BIN	tests/fsck-data/test2j/data.sql	Normal file	(Binary file not shown.)
Some files were not shown because too many files have changed in this diff.