Compare commits
No commits in common. "master" and "nilmdb-1.2.3" have entirely different histories.
master
...
nilmdb-1.2
|
@ -1,11 +1,10 @@
|
||||||
# -*- conf -*-
|
# -*- conf -*-
|
||||||
|
|
||||||
[run]
|
[run]
|
||||||
branch = True
|
# branch = True
|
||||||
|
|
||||||
[report]
|
[report]
|
||||||
exclude_lines =
|
exclude_lines =
|
||||||
pragma: no cover
|
pragma: no cover
|
||||||
if 0:
|
if 0:
|
||||||
omit = nilmdb/scripts,nilmdb/_version.py,nilmdb/fsck
|
omit = nilmdb/utils/datetime_tz*,nilmdb/scripts,nilmdb/_version.py
|
||||||
show_missing = True
|
|
||||||
|
|
11
.gitignore
vendored
11
.gitignore
vendored
|
@ -4,20 +4,23 @@ tests/*testdb/
|
||||||
db/
|
db/
|
||||||
|
|
||||||
# Compiled / cythonized files
|
# Compiled / cythonized files
|
||||||
README.html
|
|
||||||
docs/*.html
|
docs/*.html
|
||||||
build/
|
build/
|
||||||
*.pyc
|
*.pyc
|
||||||
nilmdb/server/interval.c
|
nilmdb/server/interval.c
|
||||||
|
nilmdb/server/interval.so
|
||||||
nilmdb/server/layout.c
|
nilmdb/server/layout.c
|
||||||
|
nilmdb/server/layout.so
|
||||||
nilmdb/server/rbtree.c
|
nilmdb/server/rbtree.c
|
||||||
*.so
|
nilmdb/server/rbtree.so
|
||||||
|
|
||||||
# Setup junk
|
# Setup junk
|
||||||
dist/
|
dist/
|
||||||
nilmdb.egg-info/
|
nilmdb.egg-info/
|
||||||
venv/
|
|
||||||
.eggs/
|
# This gets generated as needed by setup.py
|
||||||
|
MANIFEST.in
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
# Misc
|
# Misc
|
||||||
timeit*out
|
timeit*out
|
||||||
|
|
250
.pylintrc
Normal file
250
.pylintrc
Normal file
|
@ -0,0 +1,250 @@
|
||||||
|
# -*- conf -*-
|
||||||
|
[MASTER]
|
||||||
|
|
||||||
|
# Specify a configuration file.
|
||||||
|
#rcfile=
|
||||||
|
|
||||||
|
# Python code to execute, usually for sys.path manipulation such as
|
||||||
|
# pygtk.require().
|
||||||
|
#init-hook=
|
||||||
|
|
||||||
|
# Profiled execution.
|
||||||
|
profile=no
|
||||||
|
|
||||||
|
# Add files or directories to the blacklist. They should be base names, not
|
||||||
|
# paths.
|
||||||
|
ignore=datetime_tz
|
||||||
|
|
||||||
|
# Pickle collected data for later comparisons.
|
||||||
|
persistent=no
|
||||||
|
|
||||||
|
# List of plugins (as comma separated values of python modules names) to load,
|
||||||
|
# usually to register additional checkers.
|
||||||
|
load-plugins=
|
||||||
|
|
||||||
|
|
||||||
|
[MESSAGES CONTROL]
|
||||||
|
|
||||||
|
# Enable the message, report, category or checker with the given id(s). You can
|
||||||
|
# either give multiple identifier separated by comma (,) or put this option
|
||||||
|
# multiple time.
|
||||||
|
#enable=
|
||||||
|
|
||||||
|
# Disable the message, report, category or checker with the given id(s). You
|
||||||
|
# can either give multiple identifier separated by comma (,) or put this option
|
||||||
|
# multiple time (only on the command line, not in the configuration file where
|
||||||
|
# it should appear only once).
|
||||||
|
disable=C0111,R0903,R0201,R0914,R0912,W0142,W0703,W0702
|
||||||
|
|
||||||
|
|
||||||
|
[REPORTS]
|
||||||
|
|
||||||
|
# Set the output format. Available formats are text, parseable, colorized, msvs
|
||||||
|
# (visual studio) and html
|
||||||
|
output-format=parseable
|
||||||
|
|
||||||
|
# Include message's id in output
|
||||||
|
include-ids=yes
|
||||||
|
|
||||||
|
# Put messages in a separate file for each module / package specified on the
|
||||||
|
# command line instead of printing them on stdout. Reports (if any) will be
|
||||||
|
# written in a file name "pylint_global.[txt|html]".
|
||||||
|
files-output=no
|
||||||
|
|
||||||
|
# Tells whether to display a full report or only the messages
|
||||||
|
reports=yes
|
||||||
|
|
||||||
|
# Python expression which should return a note less than 10 (10 is the highest
|
||||||
|
# note). You have access to the variables errors warning, statement which
|
||||||
|
# respectively contain the number of errors / warnings messages and the total
|
||||||
|
# number of statements analyzed. This is used by the global evaluation report
|
||||||
|
# (RP0004).
|
||||||
|
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||||
|
|
||||||
|
# Add a comment according to your evaluation note. This is used by the global
|
||||||
|
# evaluation report (RP0004).
|
||||||
|
comment=no
|
||||||
|
|
||||||
|
|
||||||
|
[SIMILARITIES]
|
||||||
|
|
||||||
|
# Minimum lines number of a similarity.
|
||||||
|
min-similarity-lines=4
|
||||||
|
|
||||||
|
# Ignore comments when computing similarities.
|
||||||
|
ignore-comments=yes
|
||||||
|
|
||||||
|
# Ignore docstrings when computing similarities.
|
||||||
|
ignore-docstrings=yes
|
||||||
|
|
||||||
|
|
||||||
|
[TYPECHECK]
|
||||||
|
|
||||||
|
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||||
|
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||||
|
ignore-mixin-members=yes
|
||||||
|
|
||||||
|
# List of classes names for which member attributes should not be checked
|
||||||
|
# (useful for classes with attributes dynamically set).
|
||||||
|
ignored-classes=SQLObject
|
||||||
|
|
||||||
|
# When zope mode is activated, add a predefined set of Zope acquired attributes
|
||||||
|
# to generated-members.
|
||||||
|
zope=no
|
||||||
|
|
||||||
|
# List of members which are set dynamically and missed by pylint inference
|
||||||
|
# system, and so shouldn't trigger E0201 when accessed. Python regular
|
||||||
|
# expressions are accepted.
|
||||||
|
generated-members=REQUEST,acl_users,aq_parent
|
||||||
|
|
||||||
|
|
||||||
|
[FORMAT]
|
||||||
|
|
||||||
|
# Maximum number of characters on a single line.
|
||||||
|
max-line-length=80
|
||||||
|
|
||||||
|
# Maximum number of lines in a module
|
||||||
|
max-module-lines=1000
|
||||||
|
|
||||||
|
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||||
|
# tab).
|
||||||
|
indent-string=' '
|
||||||
|
|
||||||
|
|
||||||
|
[MISCELLANEOUS]
|
||||||
|
|
||||||
|
# List of note tags to take in consideration, separated by a comma.
|
||||||
|
notes=FIXME,XXX,TODO
|
||||||
|
|
||||||
|
|
||||||
|
[VARIABLES]
|
||||||
|
|
||||||
|
# Tells whether we should check for unused import in __init__ files.
|
||||||
|
init-import=no
|
||||||
|
|
||||||
|
# A regular expression matching the beginning of the name of dummy variables
|
||||||
|
# (i.e. not used).
|
||||||
|
dummy-variables-rgx=_|dummy
|
||||||
|
|
||||||
|
# List of additional names supposed to be defined in builtins. Remember that
|
||||||
|
# you should avoid to define new builtins when possible.
|
||||||
|
additional-builtins=
|
||||||
|
|
||||||
|
|
||||||
|
[BASIC]
|
||||||
|
|
||||||
|
# Required attributes for module, separated by a comma
|
||||||
|
required-attributes=
|
||||||
|
|
||||||
|
# List of builtins function names that should not be used, separated by a comma
|
||||||
|
bad-functions=apply,input
|
||||||
|
|
||||||
|
# Regular expression which should only match correct module names
|
||||||
|
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct module level names
|
||||||
|
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__)|version)$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct class names
|
||||||
|
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct function names
|
||||||
|
function-rgx=[a-z_][a-z0-9_]{0,30}$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct method names
|
||||||
|
method-rgx=[a-z_][a-z0-9_]{0,30}$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct instance attribute names
|
||||||
|
attr-rgx=[a-z_][a-z0-9_]{0,30}$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct argument names
|
||||||
|
argument-rgx=[a-z_][a-z0-9_]{0,30}$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct variable names
|
||||||
|
variable-rgx=[a-z_][a-z0-9_]{0,30}$
|
||||||
|
|
||||||
|
# Regular expression which should only match correct list comprehension /
|
||||||
|
# generator expression variable names
|
||||||
|
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||||
|
|
||||||
|
# Good variable names which should always be accepted, separated by a comma
|
||||||
|
good-names=i,j,k,ex,Run,_
|
||||||
|
|
||||||
|
# Bad variable names which should always be refused, separated by a comma
|
||||||
|
bad-names=foo,bar,baz,toto,tutu,tata
|
||||||
|
|
||||||
|
# Regular expression which should only match functions or classes name which do
|
||||||
|
# not require a docstring
|
||||||
|
no-docstring-rgx=__.*__
|
||||||
|
|
||||||
|
|
||||||
|
[CLASSES]
|
||||||
|
|
||||||
|
# List of interface methods to ignore, separated by a comma. This is used for
|
||||||
|
# instance to not check methods defines in Zope's Interface base class.
|
||||||
|
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
|
||||||
|
|
||||||
|
# List of method names used to declare (i.e. assign) instance attributes.
|
||||||
|
defining-attr-methods=__init__,__new__,setUp
|
||||||
|
|
||||||
|
# List of valid names for the first argument in a class method.
|
||||||
|
valid-classmethod-first-arg=cls
|
||||||
|
|
||||||
|
|
||||||
|
[DESIGN]
|
||||||
|
|
||||||
|
# Maximum number of arguments for function / method
|
||||||
|
max-args=5
|
||||||
|
|
||||||
|
# Argument names that match this expression will be ignored. Default to name
|
||||||
|
# with leading underscore
|
||||||
|
ignored-argument-names=_.*
|
||||||
|
|
||||||
|
# Maximum number of locals for function / method body
|
||||||
|
max-locals=15
|
||||||
|
|
||||||
|
# Maximum number of return / yield for function / method body
|
||||||
|
max-returns=6
|
||||||
|
|
||||||
|
# Maximum number of branch for function / method body
|
||||||
|
max-branchs=12
|
||||||
|
|
||||||
|
# Maximum number of statements in function / method body
|
||||||
|
max-statements=50
|
||||||
|
|
||||||
|
# Maximum number of parents for a class (see R0901).
|
||||||
|
max-parents=7
|
||||||
|
|
||||||
|
# Maximum number of attributes for a class (see R0902).
|
||||||
|
max-attributes=7
|
||||||
|
|
||||||
|
# Minimum number of public methods for a class (see R0903).
|
||||||
|
min-public-methods=2
|
||||||
|
|
||||||
|
# Maximum number of public methods for a class (see R0904).
|
||||||
|
max-public-methods=20
|
||||||
|
|
||||||
|
|
||||||
|
[IMPORTS]
|
||||||
|
|
||||||
|
# Deprecated modules which should not be used, separated by a comma
|
||||||
|
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
|
||||||
|
|
||||||
|
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||||
|
# given file (report RP0402 must not be disabled)
|
||||||
|
import-graph=
|
||||||
|
|
||||||
|
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||||
|
# not be disabled)
|
||||||
|
ext-import-graph=
|
||||||
|
|
||||||
|
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||||
|
# not be disabled)
|
||||||
|
int-import-graph=
|
||||||
|
|
||||||
|
|
||||||
|
[EXCEPTIONS]
|
||||||
|
|
||||||
|
# Exceptions that will emit a warning when being caught. Defaults to
|
||||||
|
# "Exception"
|
||||||
|
overgeneral-exceptions=Exception
|
29
MANIFEST.in
29
MANIFEST.in
|
@ -1,29 +0,0 @@
|
||||||
# Root
|
|
||||||
include README.txt
|
|
||||||
include setup.cfg
|
|
||||||
include setup.py
|
|
||||||
include versioneer.py
|
|
||||||
include Makefile
|
|
||||||
include .coveragerc
|
|
||||||
include .pylintrc
|
|
||||||
include requirements.txt
|
|
||||||
|
|
||||||
# Cython files -- include .pyx source, but not the generated .c files
|
|
||||||
# (Downstream systems must have cython installed in order to build)
|
|
||||||
recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
|
|
||||||
exclude nilmdb/server/interval.c
|
|
||||||
exclude nilmdb/server/rbtree.c
|
|
||||||
|
|
||||||
# Version
|
|
||||||
include nilmdb/_version.py
|
|
||||||
|
|
||||||
# Tests
|
|
||||||
recursive-include tests *.py
|
|
||||||
recursive-include tests/data *
|
|
||||||
include tests/test.order
|
|
||||||
|
|
||||||
# Docs
|
|
||||||
recursive-include docs Makefile *.md
|
|
||||||
|
|
||||||
# Extras
|
|
||||||
recursive-include extras *
|
|
30
Makefile
30
Makefile
|
@ -2,49 +2,41 @@
|
||||||
all: test
|
all: test
|
||||||
|
|
||||||
version:
|
version:
|
||||||
python3 setup.py version
|
python setup.py version
|
||||||
|
|
||||||
build:
|
build:
|
||||||
python3 setup.py build_ext --inplace
|
python setup.py build_ext --inplace
|
||||||
|
|
||||||
dist: sdist
|
dist: sdist
|
||||||
sdist:
|
sdist:
|
||||||
python3 setup.py sdist
|
python setup.py sdist
|
||||||
|
|
||||||
install:
|
install:
|
||||||
python3 setup.py install
|
python setup.py install
|
||||||
|
|
||||||
develop:
|
|
||||||
python3 setup.py develop
|
|
||||||
|
|
||||||
docs:
|
docs:
|
||||||
make -C docs
|
make -C docs
|
||||||
|
|
||||||
ctrl: flake
|
|
||||||
flake:
|
|
||||||
flake8 nilmdb
|
|
||||||
lint:
|
lint:
|
||||||
pylint3 --rcfile=setup.cfg nilmdb
|
pylint --rcfile=.pylintrc nilmdb
|
||||||
|
|
||||||
test:
|
test:
|
||||||
ifneq ($(INSIDE_EMACS),)
|
ifeq ($(INSIDE_EMACS), t)
|
||||||
# Use the slightly more flexible script
|
# Use the slightly more flexible script
|
||||||
python3 setup.py build_ext --inplace
|
python tests/runtests.py
|
||||||
python3 tests/runtests.py
|
|
||||||
else
|
else
|
||||||
# Let setup.py check dependencies, build stuff, and run the test
|
# Let setup.py check dependencies, build stuff, and run the test
|
||||||
python3 setup.py nosetests
|
python setup.py nosetests
|
||||||
endif
|
endif
|
||||||
|
|
||||||
clean::
|
clean::
|
||||||
find . -name '*.pyc' -o -name '__pycache__' -print0 | xargs -0 rm -rf
|
find . -name '*pyc' | xargs rm -f
|
||||||
rm -f .coverage
|
rm -f .coverage
|
||||||
rm -rf tests/*testdb*
|
rm -rf tests/*testdb*
|
||||||
rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so
|
rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so MANIFEST.in
|
||||||
make -C docs clean
|
make -C docs clean
|
||||||
|
|
||||||
gitclean::
|
gitclean::
|
||||||
git clean -dXf
|
git clean -dXf
|
||||||
|
|
||||||
.PHONY: all version build dist sdist install docs test
|
.PHONY: all version build dist sdist install docs lint test clean
|
||||||
.PHONY: ctrl lint flake clean gitclean
|
|
||||||
|
|
40
README.md
40
README.md
|
@ -1,40 +0,0 @@
|
||||||
# nilmdb: Non-Intrusive Load Monitor Database
|
|
||||||
by Jim Paris <jim@jtan.com>
|
|
||||||
|
|
||||||
NilmDB requires Python 3.8 or newer.
|
|
||||||
|
|
||||||
## Prerequisites:
|
|
||||||
|
|
||||||
# Runtime and build environments
|
|
||||||
sudo apt install python3 python3-dev python3-venv python3-pip
|
|
||||||
|
|
||||||
# Create a new Python virtual environment to isolate deps.
|
|
||||||
python3 -m venv ../venv
|
|
||||||
source ../venv/bin/activate # run "deactivate" to leave
|
|
||||||
|
|
||||||
# Install all Python dependencies
|
|
||||||
pip3 install -r requirements.txt
|
|
||||||
|
|
||||||
## Test:
|
|
||||||
|
|
||||||
python3 setup.py nosetests
|
|
||||||
|
|
||||||
## Install:
|
|
||||||
|
|
||||||
Install it into the virtual environment
|
|
||||||
|
|
||||||
python3 setup.py install
|
|
||||||
|
|
||||||
If you want to instead install it system-wide, you will also need to
|
|
||||||
install the requirements system-wide:
|
|
||||||
|
|
||||||
sudo pip3 install -r requirements.txt
|
|
||||||
sudo python3 setup.py install
|
|
||||||
|
|
||||||
## Usage:
|
|
||||||
|
|
||||||
nilmdb-server --help
|
|
||||||
nilmdb-fsck --help
|
|
||||||
nilmtool --help
|
|
||||||
|
|
||||||
See docs/wsgi.md for info on setting up a WSGI application in Apache.
|
|
26
README.txt
Normal file
26
README.txt
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
nilmdb: Non-Intrusive Load Monitor Database
|
||||||
|
by Jim Paris <jim@jtan.com>
|
||||||
|
|
||||||
|
Prerequisites:
|
||||||
|
|
||||||
|
# Runtime and build environments
|
||||||
|
sudo apt-get install python2.7 python2.7-dev python-setuptools cython
|
||||||
|
|
||||||
|
# Base NilmDB dependencies
|
||||||
|
sudo apt-get install python-cherrypy3 python-decorator python-simplejson
|
||||||
|
sudo apt-get install python-requests python-dateutil python-tz python-psutil
|
||||||
|
|
||||||
|
# Tools for running tests
|
||||||
|
sudo apt-get install python-nose python-coverage
|
||||||
|
|
||||||
|
Test:
|
||||||
|
python setup.py nosetests
|
||||||
|
|
||||||
|
Install:
|
||||||
|
|
||||||
|
python setup.py install
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
nilmdb-server --help
|
||||||
|
nilmtool --help
|
203
docs/design.md
203
docs/design.md
|
@ -140,7 +140,7 @@ Speed
|
||||||
|
|
||||||
- Next slowdown target is nilmdb.layout.Parser.parse().
|
- Next slowdown target is nilmdb.layout.Parser.parse().
|
||||||
- Rewrote parsers using cython and sscanf
|
- Rewrote parsers using cython and sscanf
|
||||||
- Stats (rev 10831), with `_add_interval` disabled
|
- Stats (rev 10831), with _add_interval disabled
|
||||||
|
|
||||||
layout.pyx.Parser.parse:128 6303 sec, 262k calls
|
layout.pyx.Parser.parse:128 6303 sec, 262k calls
|
||||||
layout.pyx.parse:63 13913 sec, 5.1g calls
|
layout.pyx.parse:63 13913 sec, 5.1g calls
|
||||||
|
@ -186,19 +186,6 @@ IntervalSet speed
|
||||||
- rbtree and interval converted to cython:
|
- rbtree and interval converted to cython:
|
||||||
8.4 μS, total 12 s, 134 MB RAM
|
8.4 μS, total 12 s, 134 MB RAM
|
||||||
|
|
||||||
- Would like to move Interval itself back to Python so other
|
|
||||||
non-cythonized code like client code can use it more easily.
|
|
||||||
Testing speed with just `test_interval` being tested, with
|
|
||||||
`range(5,22)`, using `/usr/bin/time -v python tests/runtests.py`,
|
|
||||||
times recorded for 2097152:
|
|
||||||
- 52ae397 (Interval in cython):
|
|
||||||
12.6133 μs each, ratio 0.866533, total 47 sec, 399 MB RAM
|
|
||||||
- 9759dcf (Interval in python):
|
|
||||||
21.2937 μs each, ratio 1.462870, total 83 sec, 1107 MB RAM
|
|
||||||
That's a huge difference! Instead, will keep Interval and DBInterval
|
|
||||||
cythonized inside nilmdb, and just have an additional copy in
|
|
||||||
nilmdb.utils for clients to use.
|
|
||||||
|
|
||||||
Layouts
|
Layouts
|
||||||
-------
|
-------
|
||||||
Current/old design has specific layouts: RawData, PrepData, RawNotchedData.
|
Current/old design has specific layouts: RawData, PrepData, RawNotchedData.
|
||||||
|
@ -279,191 +266,3 @@ Each table contains:
|
||||||
from the end of the file will not shorten it; it will only be
|
from the end of the file will not shorten it; it will only be
|
||||||
deleted when it has been fully filled and all of the data has been
|
deleted when it has been fully filled and all of the data has been
|
||||||
subsequently removed.
|
subsequently removed.
|
||||||
|
|
||||||
|
|
||||||
Rocket
|
|
||||||
------
|
|
||||||
|
|
||||||
Original design had the nilmdb.nilmdb thread (through bulkdata)
|
|
||||||
convert from on-disk layout to a Python list, and then the
|
|
||||||
nilmdb.server thread (from cherrypy) converts to ASCII. For at least
|
|
||||||
the extraction side of things, it's easy to pass the bulkdata a layout
|
|
||||||
name instead, and have it convert directly from on-disk to ASCII
|
|
||||||
format, because this conversion can then be shoved into a C module.
|
|
||||||
This module, which provides a means for converting directly from
|
|
||||||
on-disk format to ASCII or Python lists, is the "rocket" interface.
|
|
||||||
Python is still used to manage the files and figure out where the
|
|
||||||
data should go; rocket just puts binary data directly in or out of
|
|
||||||
those files at specified locations.
|
|
||||||
|
|
||||||
Before rocket, testing speed with uint16_6 data, with an end-to-end
|
|
||||||
test (extracting data with nilmtool):
|
|
||||||
|
|
||||||
- insert: 65 klines/sec
|
|
||||||
- extract: 120 klines/sec
|
|
||||||
|
|
||||||
After switching to the rocket design, but using the Python version
|
|
||||||
(pyrocket):
|
|
||||||
|
|
||||||
- insert: 57 klines/sec
|
|
||||||
- extract: 120 klines/sec
|
|
||||||
|
|
||||||
After switching to a C extension module (rocket.c)
|
|
||||||
|
|
||||||
- insert: 74 klines/sec through insert.py; 99.6 klines/sec through nilmtool
|
|
||||||
- extract: 335 klines/sec
|
|
||||||
|
|
||||||
After client block updates (described below):
|
|
||||||
|
|
||||||
- insert: 180 klines/sec through nilmtool (pre-timestamped)
|
|
||||||
- extract: 390 klines/sec through nilmtool
|
|
||||||
|
|
||||||
Using "insert --timestamp" or "extract --bare" cuts the speed in half.
|
|
||||||
|
|
||||||
Blocks versus lines
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
Generally want to avoid parsing the bulk of the data as lines if
|
|
||||||
possible, and transfer things in bigger blocks at once.
|
|
||||||
|
|
||||||
Current places where we use lines:
|
|
||||||
|
|
||||||
- All data returned by `client.stream_extract`, since it comes from
|
|
||||||
`httpclient.get_gen`, which iterates over lines. Not sure if this
|
|
||||||
should be changed, because a `nilmtool extract` is just about the
|
|
||||||
same speed as `curl -q .../stream/extract`!
|
|
||||||
|
|
||||||
- `client.StreamInserter.insert_iter` and
|
|
||||||
`client.StreamInserter.insert_line`, which should probably get
|
|
||||||
replaced with block versions. There's no real need to keep
|
|
||||||
updating the timestamp every time we get a new line of data.
|
|
||||||
|
|
||||||
- Finished. Just a single insert() that takes any length string and
|
|
||||||
does very little processing until it's time to send it to the
|
|
||||||
server.
|
|
||||||
|
|
||||||
Timestamps
|
|
||||||
----------
|
|
||||||
|
|
||||||
Timestamps are currently double-precision floats (64 bit). Since the
|
|
||||||
mantissa is 53-bit, this can only represent about 15-17 significant
|
|
||||||
figures, and microsecond Unix timestamps like 1222333444.000111 are
|
|
||||||
already 16 significant figures. Rounding is therefore an issue;
|
|
||||||
it's hard to sure that converting from ASCII, then back to ASCII,
|
|
||||||
will always give the same result.
|
|
||||||
|
|
||||||
Also, if the client provides a floating point value like 1.9999999999,
|
|
||||||
we need to be careful that we don't store it as 1.9999999999 but later
|
|
||||||
print it as 2.000000, because then round-trips change the data.
|
|
||||||
|
|
||||||
Possible solutions:
|
|
||||||
|
|
||||||
- When the client provides a floating point value to the server,
|
|
||||||
always round to the 6th decimal digit before verifying & storing.
|
|
||||||
Good for compatibility and simplicity. But still might have rounding
|
|
||||||
issues, and clients will also need to round when doing their own
|
|
||||||
verification. Having every piece of code need to know which digit
|
|
||||||
to round at is not ideal.
|
|
||||||
|
|
||||||
- Always store int64 timestamps on the server, representing
|
|
||||||
microseconds since epoch. int64 timestamps are used in all HTTP
|
|
||||||
parameters, in insert/extract ASCII strings, client API, commandline
|
|
||||||
raw timestamps, etc. Pretty big change.
|
|
||||||
|
|
||||||
This is what we'll go with...
|
|
||||||
|
|
||||||
- Client programs that interpret the timestamps as doubles instead
|
|
||||||
of ints will remain accurate until 2^53 microseconds, or year
|
|
||||||
2255.
|
|
||||||
|
|
||||||
- On insert, maybe it's OK to send floating point microsecond values
|
|
||||||
(1234567890123456.0), just to cope with clients that want to print
|
|
||||||
everything as a double. Server could try parsing as int64, and if
|
|
||||||
that fails, parse as double and truncate to int64. However, this
|
|
||||||
wouldn't catch imprecise inputs like "1.23456789012e+15". But
|
|
||||||
maybe that can just be ignored; it's likely to cause a
|
|
||||||
non-monotonic error at the client.
|
|
||||||
|
|
||||||
- Timestamps like 1234567890.123456 never show up anywhere, except
|
|
||||||
for interfacing to datetime_tz etc. Command line "raw timestamps"
|
|
||||||
are always printed as int64 values, and a new format
|
|
||||||
"@1234567890123456" is added to the parser for specifying them
|
|
||||||
exactly.
|
|
||||||
|
|
||||||
Binary interface
|
|
||||||
----------------
|
|
||||||
|
|
||||||
The ASCII interface is too slow for high-bandwidth processing, like
|
|
||||||
sinefits, prep, etc. A binary interface was added so that you can
|
|
||||||
extract the raw binary out of the bulkdata storage. This binary is
|
|
||||||
a little-endian format, e.g. in C a uint16_6 stream would be:
|
|
||||||
|
|
||||||
#include <endian.h>
|
|
||||||
#include <stdint.h>
|
|
||||||
struct {
|
|
||||||
int64_t timestamp_le;
|
|
||||||
uint16_t data_le[6];
|
|
||||||
} __attribute__((packed));
|
|
||||||
|
|
||||||
Remember to byteswap (with e.g. `letoh` in C)!
|
|
||||||
|
|
||||||
This interface is used by the new `nilmdb.client.numpyclient.NumpyClient`
|
|
||||||
class, which is a subclass of the normal `nilmcb.client.client.Client`
|
|
||||||
and has all of the same functions. It adds three new functions:
|
|
||||||
|
|
||||||
- `stream_extract_numpy` to extract data as a Numpy array
|
|
||||||
|
|
||||||
- `stream_insert_numpy` to insert data as a Numpy array
|
|
||||||
|
|
||||||
- `stream_insert_numpy_context` is the context manager for
|
|
||||||
incrementally inserting data
|
|
||||||
|
|
||||||
It is significantly faster! It is about 20 times faster to decimate a
|
|
||||||
stream with `nilm-decimate` when the filter code is using the new
|
|
||||||
binary/numpy interface.
|
|
||||||
|
|
||||||
|
|
||||||
WSGI interface & chunked requests
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
mod_wsgi requires "WSGIChunkedRequest On" to handle
|
|
||||||
"Transfer-encoding: Chunked" requests. However, `/stream/insert`
|
|
||||||
doesn't handle this correctly right now, because:
|
|
||||||
|
|
||||||
- The `cherrypy.request.body.read()` call needs to be fixed for chunked requests
|
|
||||||
|
|
||||||
- We don't want to just buffer endlessly in the server, and it will
|
|
||||||
require some thought on how to handle data in chunks (what to do about
|
|
||||||
interval endpoints).
|
|
||||||
|
|
||||||
It is probably better to just keep the endpoint management on the client
|
|
||||||
side, so leave "WSGIChunkedRequest off" for now.
|
|
||||||
|
|
||||||
|
|
||||||
Unicode & character encoding
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
Stream data is passed back and forth as raw `bytes` objects in most
|
|
||||||
places, including the `nilmdb.client` and command-line interfaces.
|
|
||||||
This is done partially for performance reasons, and partially to
|
|
||||||
support the binary insert/extract options, where character-set encoding
|
|
||||||
would not apply.
|
|
||||||
|
|
||||||
For the HTTP server, the raw bytes transferred over HTTP are interpreted
|
|
||||||
as follows:
|
|
||||||
- For `/stream/insert`, the client-provided `Content-Type` is ignored,
|
|
||||||
and the data is read as if it were `application/octet-stream`.
|
|
||||||
- For `/stream/extract`, the returned data is `application/octet-stream`.
|
|
||||||
- All other endpoints communicate via JSON, which is specified to always
|
|
||||||
be encoded as UTF-8. This includes:
|
|
||||||
- `/version`
|
|
||||||
- `/dbinfo`
|
|
||||||
- `/stream/list`
|
|
||||||
- `/stream/create`
|
|
||||||
- `/stream/destroy`
|
|
||||||
- `/stream/rename`
|
|
||||||
- `/stream/get_metadata`
|
|
||||||
- `/stream/set_metadata`
|
|
||||||
- `/stream/update_metadata`
|
|
||||||
- `/stream/remove`
|
|
||||||
- `/stream/intervals`
|
|
||||||
|
|
32
docs/wsgi.md
32
docs/wsgi.md
|
@ -1,32 +0,0 @@
|
||||||
WSGI Application in Apache
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
Install `apache2` and `libapache2-mod-wsgi`
|
|
||||||
|
|
||||||
We'll set up the database server at URL `http://myhost.com/nilmdb`.
|
|
||||||
The database will be stored in `/home/nilm/db`, and the process will
|
|
||||||
run as user `nilm`, group `nilm`.
|
|
||||||
|
|
||||||
First, create a WSGI script `/home/nilm/nilmdb.wsgi` containing:
|
|
||||||
|
|
||||||
import nilmdb.server
|
|
||||||
application = nilmdb.server.wsgi_application("/home/nilm/db", "/nilmdb")
|
|
||||||
|
|
||||||
The first parameter is the local filesystem path, and the second
|
|
||||||
parameter is the path part of the URL.
|
|
||||||
|
|
||||||
Then, set up Apache with a configuration like:
|
|
||||||
|
|
||||||
<VirtualHost>
|
|
||||||
WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
|
|
||||||
WSGIDaemonProcess nilmdb-procgroup threads=32 user=nilm group=nilm
|
|
||||||
<Location /nilmdb>
|
|
||||||
WSGIProcessGroup nilmdb-procgroup
|
|
||||||
WSGIApplicationGroup nilmdb-appgroup
|
|
||||||
|
|
||||||
# Access control example:
|
|
||||||
Order deny,allow
|
|
||||||
Deny from all
|
|
||||||
Allow from 1.2.3.4
|
|
||||||
</Location>
|
|
||||||
</VirtualHost>
|
|
|
@ -1,50 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import pickle
|
|
||||||
import argparse
|
|
||||||
import fcntl
|
|
||||||
import re
|
|
||||||
from nilmdb.client.numpyclient import layout_to_dtype
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description = """
|
|
||||||
Fix database corruption where binary writes caused too much data to be
|
|
||||||
written to the file. Truncates files to the correct length. This was
|
|
||||||
fixed by b98ff1331a515ad47fd3203615e835b529b039f9.
|
|
||||||
""")
|
|
||||||
parser.add_argument("path", action="store", help='Database root path')
|
|
||||||
parser.add_argument("-y", "--yes", action="store_true", help='Fix them')
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
lock = os.path.join(args.path, "data.lock")
|
|
||||||
with open(lock, "w") as f:
|
|
||||||
fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
|
|
||||||
|
|
||||||
fix = {}
|
|
||||||
|
|
||||||
for (path, dirs, files) in os.walk(args.path):
|
|
||||||
if "_format" in files:
|
|
||||||
with open(os.path.join(path, "_format")) as format:
|
|
||||||
fmt = pickle.load(format)
|
|
||||||
rowsize = layout_to_dtype(fmt["layout"]).itemsize
|
|
||||||
maxsize = rowsize * fmt["rows_per_file"]
|
|
||||||
fix[path] = maxsize
|
|
||||||
if maxsize < 128000000: # sanity check
|
|
||||||
raise Exception("bad maxsize " + str(maxsize))
|
|
||||||
|
|
||||||
for fixpath in fix:
|
|
||||||
for (path, dirs, files) in os.walk(fixpath):
|
|
||||||
for fn in files:
|
|
||||||
if not re.match("^[0-9a-f]{4,}$", fn):
|
|
||||||
continue
|
|
||||||
fn = os.path.join(path, fn)
|
|
||||||
size = os.path.getsize(fn)
|
|
||||||
maxsize = fix[fixpath]
|
|
||||||
if size > maxsize:
|
|
||||||
diff = size - maxsize
|
|
||||||
print(diff, "too big:", fn)
|
|
||||||
if args.yes:
|
|
||||||
with open(fn, "a+") as dbfile:
|
|
||||||
dbfile.truncate(maxsize)
|
|
|
@ -1,20 +0,0 @@
|
||||||
# To enable bash completion:
|
|
||||||
#
|
|
||||||
# 1. Ensure python-argcomplete is installed:
|
|
||||||
# pip install argcomplete
|
|
||||||
# 2. Source this file:
|
|
||||||
# . nilmtool-bash-completion.sh
|
|
||||||
|
|
||||||
_nilmtool_argcomplete() {
|
|
||||||
local IFS=$(printf "\013")
|
|
||||||
COMPREPLY=( $(IFS="$IFS" \
|
|
||||||
COMP_LINE="$COMP_LINE" \
|
|
||||||
COMP_WORDBREAKS="$COMP_WORDBREAKS" \
|
|
||||||
COMP_POINT="$COMP_POINT" \
|
|
||||||
_ARGCOMPLETE=1 \
|
|
||||||
"$1" 8>&1 9>&2 1>/dev/null 2>/dev/null) )
|
|
||||||
if [[ $? != 0 ]]; then
|
|
||||||
unset COMPREPLY
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
complete -o nospace -F _nilmtool_argcomplete nilmtool
|
|
|
@ -1,5 +1,8 @@
|
||||||
"""Main NilmDB import"""
|
"""Main NilmDB import"""
|
||||||
|
|
||||||
from ._version import get_versions
|
from nilmdb.server import NilmDB, Server
|
||||||
|
from nilmdb.client import Client
|
||||||
|
|
||||||
|
from nilmdb._version import get_versions
|
||||||
__version__ = get_versions()['version']
|
__version__ = get_versions()['version']
|
||||||
del get_versions
|
del get_versions
|
||||||
|
|
|
@ -1,520 +1,197 @@
|
||||||
|
|
||||||
|
IN_LONG_VERSION_PY = True
|
||||||
# This file helps to compute a version number in source trees obtained from
|
# This file helps to compute a version number in source trees obtained from
|
||||||
# git-archive tarball (such as those provided by githubs download-from-tag
|
# git-archive tarball (such as those provided by githubs download-from-tag
|
||||||
# feature). Distribution tarballs (built by setup.py sdist) and build
|
# feature). Distribution tarballs (build by setup.py sdist) and build
|
||||||
# directories (produced by setup.py build) will contain a much shorter file
|
# directories (produced by setup.py build) will contain a much shorter file
|
||||||
# that just contains the computed version number.
|
# that just contains the computed version number.
|
||||||
|
|
||||||
# This file is released into the public domain. Generated by
|
# This file is released into the public domain. Generated by
|
||||||
# versioneer-0.18 (https://github.com/warner/python-versioneer)
|
# versioneer-0.7+ (https://github.com/warner/python-versioneer)
|
||||||
|
|
||||||
|
# these strings will be replaced by git during git-archive
|
||||||
|
git_refnames = "$Format:%d$"
|
||||||
|
git_full = "$Format:%H$"
|
||||||
|
|
||||||
"""Git implementation of _version.py."""
|
|
||||||
|
|
||||||
import errno
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
def run_command(args, cwd=None, verbose=False):
|
||||||
def get_keywords():
|
|
||||||
"""Get the keywords needed to look up the version information."""
|
|
||||||
# these strings will be replaced by git during git-archive.
|
|
||||||
# setup.py/versioneer.py will grep for the variable names, so they must
|
|
||||||
# each be defined on a line of their own. _version.py will just call
|
|
||||||
# get_keywords().
|
|
||||||
git_refnames = "$Format:%d$"
|
|
||||||
git_full = "$Format:%H$"
|
|
||||||
git_date = "$Format:%ci$"
|
|
||||||
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
|
|
||||||
return keywords
|
|
||||||
|
|
||||||
|
|
||||||
class VersioneerConfig:
|
|
||||||
"""Container for Versioneer configuration parameters."""
|
|
||||||
|
|
||||||
|
|
||||||
def get_config():
|
|
||||||
"""Create, populate and return the VersioneerConfig() object."""
|
|
||||||
# these strings are filled in when 'setup.py versioneer' creates
|
|
||||||
# _version.py
|
|
||||||
cfg = VersioneerConfig()
|
|
||||||
cfg.VCS = "git"
|
|
||||||
cfg.style = "pep440"
|
|
||||||
cfg.tag_prefix = "nilmdb-"
|
|
||||||
cfg.parentdir_prefix = "nilmdb-"
|
|
||||||
cfg.versionfile_source = "nilmdb/_version.py"
|
|
||||||
cfg.verbose = False
|
|
||||||
return cfg
|
|
||||||
|
|
||||||
|
|
||||||
class NotThisMethod(Exception):
|
|
||||||
"""Exception raised if a method is not valid for the current scenario."""
|
|
||||||
|
|
||||||
|
|
||||||
LONG_VERSION_PY = {}
|
|
||||||
HANDLERS = {}
|
|
||||||
|
|
||||||
|
|
||||||
def register_vcs_handler(vcs, method): # decorator
|
|
||||||
"""Decorator to mark a method as the handler for a particular VCS."""
|
|
||||||
def decorate(f):
|
|
||||||
"""Store f in HANDLERS[vcs][method]."""
|
|
||||||
if vcs not in HANDLERS:
|
|
||||||
HANDLERS[vcs] = {}
|
|
||||||
HANDLERS[vcs][method] = f
|
|
||||||
return f
|
|
||||||
return decorate
|
|
||||||
|
|
||||||
|
|
||||||
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
|
|
||||||
env=None):
|
|
||||||
"""Call the given command(s)."""
|
|
||||||
assert isinstance(commands, list)
|
|
||||||
p = None
|
|
||||||
for c in commands:
|
|
||||||
try:
|
try:
|
||||||
dispcmd = str([c] + args)
|
|
||||||
# remember shell=False, so use git.cmd on windows, not just git
|
# remember shell=False, so use git.cmd on windows, not just git
|
||||||
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
|
p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=(subprocess.PIPE if hide_stderr
|
|
||||||
else None))
|
|
||||||
break
|
|
||||||
except EnvironmentError:
|
except EnvironmentError:
|
||||||
e = sys.exc_info()[1]
|
e = sys.exc_info()[1]
|
||||||
if e.errno == errno.ENOENT:
|
|
||||||
continue
|
|
||||||
if verbose:
|
if verbose:
|
||||||
print("unable to run %s" % dispcmd)
|
print("unable to run %s" % args[0])
|
||||||
print(e)
|
print(e)
|
||||||
return None, None
|
return None
|
||||||
else:
|
|
||||||
if verbose:
|
|
||||||
print("unable to find command, tried %s" % (commands,))
|
|
||||||
return None, None
|
|
||||||
stdout = p.communicate()[0].strip()
|
stdout = p.communicate()[0].strip()
|
||||||
if sys.version_info[0] >= 3:
|
if sys.version >= '3':
|
||||||
stdout = stdout.decode()
|
stdout = stdout.decode()
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
if verbose:
|
if verbose:
|
||||||
print("unable to run %s (error)" % dispcmd)
|
print("unable to run %s (error)" % args[0])
|
||||||
print("stdout was %s" % stdout)
|
return None
|
||||||
return None, p.returncode
|
return stdout
|
||||||
return stdout, p.returncode
|
|
||||||
|
|
||||||
|
|
||||||
def versions_from_parentdir(parentdir_prefix, root, verbose):
|
import sys
|
||||||
"""Try to determine the version from the parent directory name.
|
import re
|
||||||
|
import os.path
|
||||||
|
|
||||||
Source tarballs conventionally unpack into a directory that includes both
|
def get_expanded_variables(versionfile_source):
|
||||||
the project name and a version string. We will also support searching up
|
|
||||||
two directory levels for an appropriately named parent directory
|
|
||||||
"""
|
|
||||||
rootdirs = []
|
|
||||||
|
|
||||||
for i in range(3):
|
|
||||||
dirname = os.path.basename(root)
|
|
||||||
if dirname.startswith(parentdir_prefix):
|
|
||||||
return {"version": dirname[len(parentdir_prefix):],
|
|
||||||
"full-revisionid": None,
|
|
||||||
"dirty": False, "error": None, "date": None}
|
|
||||||
else:
|
|
||||||
rootdirs.append(root)
|
|
||||||
root = os.path.dirname(root) # up a level
|
|
||||||
|
|
||||||
if verbose:
|
|
||||||
print("Tried directories %s but none started with prefix %s" %
|
|
||||||
(str(rootdirs), parentdir_prefix))
|
|
||||||
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
|
|
||||||
|
|
||||||
|
|
||||||
@register_vcs_handler("git", "get_keywords")
|
|
||||||
def git_get_keywords(versionfile_abs):
|
|
||||||
"""Extract version information from the given file."""
|
|
||||||
# the code embedded in _version.py can just fetch the value of these
|
# the code embedded in _version.py can just fetch the value of these
|
||||||
# keywords. When used from setup.py, we don't want to import _version.py,
|
# variables. When used from setup.py, we don't want to import
|
||||||
# so we do it with a regexp instead. This function is not used from
|
# _version.py, so we do it with a regexp instead. This function is not
|
||||||
# _version.py.
|
# used from _version.py.
|
||||||
keywords = {}
|
variables = {}
|
||||||
try:
|
try:
|
||||||
f = open(versionfile_abs, "r")
|
for line in open(versionfile_source,"r").readlines():
|
||||||
for line in f.readlines():
|
|
||||||
if line.strip().startswith("git_refnames ="):
|
if line.strip().startswith("git_refnames ="):
|
||||||
mo = re.search(r'=\s*"(.*)"', line)
|
mo = re.search(r'=\s*"(.*)"', line)
|
||||||
if mo:
|
if mo:
|
||||||
keywords["refnames"] = mo.group(1)
|
variables["refnames"] = mo.group(1)
|
||||||
if line.strip().startswith("git_full ="):
|
if line.strip().startswith("git_full ="):
|
||||||
mo = re.search(r'=\s*"(.*)"', line)
|
mo = re.search(r'=\s*"(.*)"', line)
|
||||||
if mo:
|
if mo:
|
||||||
keywords["full"] = mo.group(1)
|
variables["full"] = mo.group(1)
|
||||||
if line.strip().startswith("git_date ="):
|
|
||||||
mo = re.search(r'=\s*"(.*)"', line)
|
|
||||||
if mo:
|
|
||||||
keywords["date"] = mo.group(1)
|
|
||||||
f.close()
|
|
||||||
except EnvironmentError:
|
except EnvironmentError:
|
||||||
pass
|
pass
|
||||||
return keywords
|
return variables
|
||||||
|
|
||||||
|
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
|
||||||
@register_vcs_handler("git", "keywords")
|
refnames = variables["refnames"].strip()
|
||||||
def git_versions_from_keywords(keywords, tag_prefix, verbose):
|
|
||||||
"""Get version information from git keywords."""
|
|
||||||
if not keywords:
|
|
||||||
raise NotThisMethod("no keywords at all, weird")
|
|
||||||
date = keywords.get("date")
|
|
||||||
if date is not None:
|
|
||||||
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
|
|
||||||
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
|
|
||||||
# -like" string, which we must then edit to make compliant), because
|
|
||||||
# it's been around since git-1.5.3, and it's too difficult to
|
|
||||||
# discover which version we're using, or to work around using an
|
|
||||||
# older one.
|
|
||||||
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
|
|
||||||
refnames = keywords["refnames"].strip()
|
|
||||||
if refnames.startswith("$Format"):
|
if refnames.startswith("$Format"):
|
||||||
if verbose:
|
if verbose:
|
||||||
print("keywords are unexpanded, not using")
|
print("variables are unexpanded, not using")
|
||||||
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
|
return {} # unexpanded, so not in an unpacked git-archive tarball
|
||||||
refs = set([r.strip() for r in refnames.strip("()").split(",")])
|
refs = set([r.strip() for r in refnames.strip("()").split(",")])
|
||||||
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
|
for ref in list(refs):
|
||||||
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
|
if not re.search(r'\d', ref):
|
||||||
TAG = "tag: "
|
|
||||||
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
|
|
||||||
if not tags:
|
|
||||||
# Either we're using git < 1.8.3, or there really are no tags. We use
|
|
||||||
# a heuristic: assume all version tags have a digit. The old git %d
|
|
||||||
# expansion behaves like git log --decorate=short and strips out the
|
|
||||||
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
|
|
||||||
# between branches and tags. By ignoring refnames without digits, we
|
|
||||||
# filter out many common branch names like "release" and
|
|
||||||
# "stabilization", as well as "HEAD" and "master".
|
|
||||||
tags = set([r for r in refs if re.search(r'\d', r)])
|
|
||||||
if verbose:
|
if verbose:
|
||||||
print("discarding '%s', no digits" % ",".join(refs - tags))
|
print("discarding '%s', no digits" % ref)
|
||||||
|
refs.discard(ref)
|
||||||
|
# Assume all version tags have a digit. git's %d expansion
|
||||||
|
# behaves like git log --decorate=short and strips out the
|
||||||
|
# refs/heads/ and refs/tags/ prefixes that would let us
|
||||||
|
# distinguish between branches and tags. By ignoring refnames
|
||||||
|
# without digits, we filter out many common branch names like
|
||||||
|
# "release" and "stabilization", as well as "HEAD" and "master".
|
||||||
if verbose:
|
if verbose:
|
||||||
print("likely tags: %s" % ",".join(sorted(tags)))
|
print("remaining refs: %s" % ",".join(sorted(refs)))
|
||||||
for ref in sorted(tags):
|
for ref in sorted(refs):
|
||||||
# sorting will prefer e.g. "2.0" over "2.0rc1"
|
# sorting will prefer e.g. "2.0" over "2.0rc1"
|
||||||
if ref.startswith(tag_prefix):
|
if ref.startswith(tag_prefix):
|
||||||
r = ref[len(tag_prefix):]
|
r = ref[len(tag_prefix):]
|
||||||
if verbose:
|
if verbose:
|
||||||
print("picking %s" % r)
|
print("picking %s" % r)
|
||||||
return { "version": r,
|
return { "version": r,
|
||||||
"full-revisionid": keywords["full"].strip(),
|
"full": variables["full"].strip() }
|
||||||
"dirty": False, "error": None,
|
# no suitable tags, so we use the full revision id
|
||||||
"date": date}
|
|
||||||
# no suitable tags, so version is "0+unknown", but full hex is still there
|
|
||||||
if verbose:
|
if verbose:
|
||||||
print("no suitable tags, using unknown + full revision id")
|
print("no suitable tags, using full revision id")
|
||||||
return {"version": "0+unknown",
|
return { "version": variables["full"].strip(),
|
||||||
"full-revisionid": keywords["full"].strip(),
|
"full": variables["full"].strip() }
|
||||||
"dirty": False, "error": "no suitable tags", "date": None}
|
|
||||||
|
|
||||||
|
def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
|
||||||
@register_vcs_handler("git", "pieces_from_vcs")
|
# this runs 'git' from the root of the source tree. That either means
|
||||||
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
|
# someone ran a setup.py command (and this code is in versioneer.py, so
|
||||||
"""Get version from 'git describe' in the root of the source tree.
|
# IN_LONG_VERSION_PY=False, thus the containing directory is the root of
|
||||||
|
# the source tree), or someone ran a project-specific entry point (and
|
||||||
This only gets called if the git-archive 'subst' keywords were *not*
|
# this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
|
||||||
expanded, and _version.py hasn't already been rewritten with a short
|
# containing directory is somewhere deeper in the source tree). This only
|
||||||
version string, meaning we're inside a checked out source tree.
|
# gets called if the git-archive 'subst' variables were *not* expanded,
|
||||||
"""
|
# and _version.py hasn't already been rewritten with a short version
|
||||||
GITS = ["git"]
|
# string, meaning we're inside a checked out source tree.
|
||||||
if sys.platform == "win32":
|
|
||||||
GITS = ["git.cmd", "git.exe"]
|
|
||||||
|
|
||||||
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
|
|
||||||
hide_stderr=True)
|
|
||||||
if rc != 0:
|
|
||||||
if verbose:
|
|
||||||
print("Directory %s not under git control" % root)
|
|
||||||
raise NotThisMethod("'git rev-parse --git-dir' returned error")
|
|
||||||
|
|
||||||
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
|
|
||||||
# if there isn't one, this yields HEX[-dirty] (no NUM)
|
|
||||||
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
|
|
||||||
"--always", "--long",
|
|
||||||
"--match", "%s*" % tag_prefix],
|
|
||||||
cwd=root)
|
|
||||||
# --long was added in git-1.5.5
|
|
||||||
if describe_out is None:
|
|
||||||
raise NotThisMethod("'git describe' failed")
|
|
||||||
describe_out = describe_out.strip()
|
|
||||||
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
|
|
||||||
if full_out is None:
|
|
||||||
raise NotThisMethod("'git rev-parse' failed")
|
|
||||||
full_out = full_out.strip()
|
|
||||||
|
|
||||||
pieces = {}
|
|
||||||
pieces["long"] = full_out
|
|
||||||
pieces["short"] = full_out[:7] # maybe improved later
|
|
||||||
pieces["error"] = None
|
|
||||||
|
|
||||||
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
|
|
||||||
# TAG might have hyphens.
|
|
||||||
git_describe = describe_out
|
|
||||||
|
|
||||||
# look for -dirty suffix
|
|
||||||
dirty = git_describe.endswith("-dirty")
|
|
||||||
pieces["dirty"] = dirty
|
|
||||||
if dirty:
|
|
||||||
git_describe = git_describe[:git_describe.rindex("-dirty")]
|
|
||||||
|
|
||||||
# now we have TAG-NUM-gHEX or HEX
|
|
||||||
|
|
||||||
if "-" in git_describe:
|
|
||||||
# TAG-NUM-gHEX
|
|
||||||
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
|
|
||||||
if not mo:
|
|
||||||
# unparseable. Maybe git-describe is misbehaving?
|
|
||||||
pieces["error"] = ("unable to parse git-describe output: '%s'"
|
|
||||||
% describe_out)
|
|
||||||
return pieces
|
|
||||||
|
|
||||||
# tag
|
|
||||||
full_tag = mo.group(1)
|
|
||||||
if not full_tag.startswith(tag_prefix):
|
|
||||||
if verbose:
|
|
||||||
fmt = "tag '%s' doesn't start with prefix '%s'"
|
|
||||||
print(fmt % (full_tag, tag_prefix))
|
|
||||||
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
|
|
||||||
% (full_tag, tag_prefix))
|
|
||||||
return pieces
|
|
||||||
pieces["closest-tag"] = full_tag[len(tag_prefix):]
|
|
||||||
|
|
||||||
# distance: number of commits since tag
|
|
||||||
pieces["distance"] = int(mo.group(2))
|
|
||||||
|
|
||||||
# commit: short hex revision ID
|
|
||||||
pieces["short"] = mo.group(3)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# HEX: no tags
|
|
||||||
pieces["closest-tag"] = None
|
|
||||||
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
|
|
||||||
cwd=root)
|
|
||||||
pieces["distance"] = int(count_out) # total number of commits
|
|
||||||
|
|
||||||
# commit date: see ISO-8601 comment in git_versions_from_keywords()
|
|
||||||
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
|
|
||||||
cwd=root)[0].strip()
|
|
||||||
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
|
|
||||||
|
|
||||||
return pieces
|
|
||||||
|
|
||||||
|
|
||||||
def plus_or_dot(pieces):
|
|
||||||
"""Return a + if we don't already have one, else return a ."""
|
|
||||||
if "+" in pieces.get("closest-tag", ""):
|
|
||||||
return "."
|
|
||||||
return "+"
|
|
||||||
|
|
||||||
|
|
||||||
def render_pep440(pieces):
|
|
||||||
"""Build up version string, with post-release "local version identifier".
|
|
||||||
|
|
||||||
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
|
|
||||||
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
|
|
||||||
|
|
||||||
Exceptions:
|
|
||||||
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
|
|
||||||
"""
|
|
||||||
if pieces["closest-tag"]:
|
|
||||||
rendered = pieces["closest-tag"]
|
|
||||||
if pieces["distance"] or pieces["dirty"]:
|
|
||||||
rendered += plus_or_dot(pieces)
|
|
||||||
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += ".dirty"
|
|
||||||
else:
|
|
||||||
# exception #1
|
|
||||||
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
|
|
||||||
pieces["short"])
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += ".dirty"
|
|
||||||
return rendered
|
|
||||||
|
|
||||||
|
|
||||||
def render_pep440_pre(pieces):
|
|
||||||
"""TAG[.post.devDISTANCE] -- No -dirty.
|
|
||||||
|
|
||||||
Exceptions:
|
|
||||||
1: no tags. 0.post.devDISTANCE
|
|
||||||
"""
|
|
||||||
if pieces["closest-tag"]:
|
|
||||||
rendered = pieces["closest-tag"]
|
|
||||||
if pieces["distance"]:
|
|
||||||
rendered += ".post.dev%d" % pieces["distance"]
|
|
||||||
else:
|
|
||||||
# exception #1
|
|
||||||
rendered = "0.post.dev%d" % pieces["distance"]
|
|
||||||
return rendered
|
|
||||||
|
|
||||||
|
|
||||||
def render_pep440_post(pieces):
|
|
||||||
"""TAG[.postDISTANCE[.dev0]+gHEX] .
|
|
||||||
|
|
||||||
The ".dev0" means dirty. Note that .dev0 sorts backwards
|
|
||||||
(a dirty tree will appear "older" than the corresponding clean one),
|
|
||||||
but you shouldn't be releasing software with -dirty anyways.
|
|
||||||
|
|
||||||
Exceptions:
|
|
||||||
1: no tags. 0.postDISTANCE[.dev0]
|
|
||||||
"""
|
|
||||||
if pieces["closest-tag"]:
|
|
||||||
rendered = pieces["closest-tag"]
|
|
||||||
if pieces["distance"] or pieces["dirty"]:
|
|
||||||
rendered += ".post%d" % pieces["distance"]
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += ".dev0"
|
|
||||||
rendered += plus_or_dot(pieces)
|
|
||||||
rendered += "g%s" % pieces["short"]
|
|
||||||
else:
|
|
||||||
# exception #1
|
|
||||||
rendered = "0.post%d" % pieces["distance"]
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += ".dev0"
|
|
||||||
rendered += "+g%s" % pieces["short"]
|
|
||||||
return rendered
|
|
||||||
|
|
||||||
|
|
||||||
def render_pep440_old(pieces):
|
|
||||||
"""TAG[.postDISTANCE[.dev0]] .
|
|
||||||
|
|
||||||
The ".dev0" means dirty.
|
|
||||||
|
|
||||||
Eexceptions:
|
|
||||||
1: no tags. 0.postDISTANCE[.dev0]
|
|
||||||
"""
|
|
||||||
if pieces["closest-tag"]:
|
|
||||||
rendered = pieces["closest-tag"]
|
|
||||||
if pieces["distance"] or pieces["dirty"]:
|
|
||||||
rendered += ".post%d" % pieces["distance"]
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += ".dev0"
|
|
||||||
else:
|
|
||||||
# exception #1
|
|
||||||
rendered = "0.post%d" % pieces["distance"]
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += ".dev0"
|
|
||||||
return rendered
|
|
||||||
|
|
||||||
|
|
||||||
def render_git_describe(pieces):
|
|
||||||
"""TAG[-DISTANCE-gHEX][-dirty].
|
|
||||||
|
|
||||||
Like 'git describe --tags --dirty --always'.
|
|
||||||
|
|
||||||
Exceptions:
|
|
||||||
1: no tags. HEX[-dirty] (note: no 'g' prefix)
|
|
||||||
"""
|
|
||||||
if pieces["closest-tag"]:
|
|
||||||
rendered = pieces["closest-tag"]
|
|
||||||
if pieces["distance"]:
|
|
||||||
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
|
|
||||||
else:
|
|
||||||
# exception #1
|
|
||||||
rendered = pieces["short"]
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += "-dirty"
|
|
||||||
return rendered
|
|
||||||
|
|
||||||
|
|
||||||
def render_git_describe_long(pieces):
|
|
||||||
"""TAG-DISTANCE-gHEX[-dirty].
|
|
||||||
|
|
||||||
Like 'git describe --tags --dirty --always -long'.
|
|
||||||
The distance/hash is unconditional.
|
|
||||||
|
|
||||||
Exceptions:
|
|
||||||
1: no tags. HEX[-dirty] (note: no 'g' prefix)
|
|
||||||
"""
|
|
||||||
if pieces["closest-tag"]:
|
|
||||||
rendered = pieces["closest-tag"]
|
|
||||||
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
|
|
||||||
else:
|
|
||||||
# exception #1
|
|
||||||
rendered = pieces["short"]
|
|
||||||
if pieces["dirty"]:
|
|
||||||
rendered += "-dirty"
|
|
||||||
return rendered
|
|
||||||
|
|
||||||
|
|
||||||
def render(pieces, style):
|
|
||||||
"""Render the given version pieces into the requested style."""
|
|
||||||
if pieces["error"]:
|
|
||||||
return {"version": "unknown",
|
|
||||||
"full-revisionid": pieces.get("long"),
|
|
||||||
"dirty": None,
|
|
||||||
"error": pieces["error"],
|
|
||||||
"date": None}
|
|
||||||
|
|
||||||
if not style or style == "default":
|
|
||||||
style = "pep440" # the default
|
|
||||||
|
|
||||||
if style == "pep440":
|
|
||||||
rendered = render_pep440(pieces)
|
|
||||||
elif style == "pep440-pre":
|
|
||||||
rendered = render_pep440_pre(pieces)
|
|
||||||
elif style == "pep440-post":
|
|
||||||
rendered = render_pep440_post(pieces)
|
|
||||||
elif style == "pep440-old":
|
|
||||||
rendered = render_pep440_old(pieces)
|
|
||||||
elif style == "git-describe":
|
|
||||||
rendered = render_git_describe(pieces)
|
|
||||||
elif style == "git-describe-long":
|
|
||||||
rendered = render_git_describe_long(pieces)
|
|
||||||
else:
|
|
||||||
raise ValueError("unknown style '%s'" % style)
|
|
||||||
|
|
||||||
return {"version": rendered, "full-revisionid": pieces["long"],
|
|
||||||
"dirty": pieces["dirty"], "error": None,
|
|
||||||
"date": pieces.get("date")}
|
|
||||||
|
|
||||||
|
|
||||||
def get_versions():
|
|
||||||
"""Get version information or return default if unable to do so."""
|
|
||||||
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
|
|
||||||
# __file__, we can work backwards from there to the root. Some
|
|
||||||
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
|
|
||||||
# case we can only use expanded keywords.
|
|
||||||
|
|
||||||
cfg = get_config()
|
|
||||||
verbose = cfg.verbose
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
|
here = os.path.abspath(__file__)
|
||||||
verbose)
|
|
||||||
except NotThisMethod:
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
root = os.path.realpath(__file__)
|
|
||||||
# versionfile_source is the relative path from the top of the source
|
|
||||||
# tree (where the .git directory might live) to this file. Invert
|
|
||||||
# this to find the root from __file__.
|
|
||||||
for i in cfg.versionfile_source.split('/'):
|
|
||||||
root = os.path.dirname(root)
|
|
||||||
except NameError:
|
except NameError:
|
||||||
return {"version": "0+unknown", "full-revisionid": None,
|
# some py2exe/bbfreeze/non-CPython implementations don't do __file__
|
||||||
"dirty": None,
|
return {} # not always correct
|
||||||
"error": "unable to find root of source tree",
|
|
||||||
"date": None}
|
|
||||||
|
|
||||||
|
# versionfile_source is the relative path from the top of the source tree
|
||||||
|
# (where the .git directory might live) to this file. Invert this to find
|
||||||
|
# the root from __file__.
|
||||||
|
root = here
|
||||||
|
if IN_LONG_VERSION_PY:
|
||||||
|
for i in range(len(versionfile_source.split("/"))):
|
||||||
|
root = os.path.dirname(root)
|
||||||
|
else:
|
||||||
|
root = os.path.dirname(here)
|
||||||
|
if not os.path.exists(os.path.join(root, ".git")):
|
||||||
|
if verbose:
|
||||||
|
print("no .git in %s" % root)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
GIT = "git"
|
||||||
|
if sys.platform == "win32":
|
||||||
|
GIT = "git.cmd"
|
||||||
|
stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
|
||||||
|
cwd=root)
|
||||||
|
if stdout is None:
|
||||||
|
return {}
|
||||||
|
if not stdout.startswith(tag_prefix):
|
||||||
|
if verbose:
|
||||||
|
print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
|
||||||
|
return {}
|
||||||
|
tag = stdout[len(tag_prefix):]
|
||||||
|
stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
|
||||||
|
if stdout is None:
|
||||||
|
return {}
|
||||||
|
full = stdout.strip()
|
||||||
|
if tag.endswith("-dirty"):
|
||||||
|
full += "-dirty"
|
||||||
|
return {"version": tag, "full": full}
|
||||||
|
|
||||||
|
|
||||||
|
def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
|
||||||
|
if IN_LONG_VERSION_PY:
|
||||||
|
# We're running from _version.py. If it's from a source tree
|
||||||
|
# (execute-in-place), we can work upwards to find the root of the
|
||||||
|
# tree, and then check the parent directory for a version string. If
|
||||||
|
# it's in an installed application, there's no hope.
|
||||||
try:
|
try:
|
||||||
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
|
here = os.path.abspath(__file__)
|
||||||
return render(pieces, cfg.style)
|
except NameError:
|
||||||
except NotThisMethod:
|
# py2exe/bbfreeze/non-CPython don't have __file__
|
||||||
pass
|
return {} # without __file__, we have no hope
|
||||||
|
# versionfile_source is the relative path from the top of the source
|
||||||
|
# tree to _version.py. Invert this to find the root from __file__.
|
||||||
|
root = here
|
||||||
|
for i in range(len(versionfile_source.split("/"))):
|
||||||
|
root = os.path.dirname(root)
|
||||||
|
else:
|
||||||
|
# we're running from versioneer.py, which means we're running from
|
||||||
|
# the setup.py in a source tree. sys.argv[0] is setup.py in the root.
|
||||||
|
here = os.path.abspath(sys.argv[0])
|
||||||
|
root = os.path.dirname(here)
|
||||||
|
|
||||||
try:
|
# Source tarballs conventionally unpack into a directory that includes
|
||||||
if cfg.parentdir_prefix:
|
# both the project name and a version string.
|
||||||
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
|
dirname = os.path.basename(root)
|
||||||
except NotThisMethod:
|
if not dirname.startswith(parentdir_prefix):
|
||||||
pass
|
if verbose:
|
||||||
|
print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
|
||||||
|
(root, dirname, parentdir_prefix))
|
||||||
|
return None
|
||||||
|
return {"version": dirname[len(parentdir_prefix):], "full": ""}
|
||||||
|
|
||||||
|
tag_prefix = "nilmdb-"
|
||||||
|
parentdir_prefix = "nilmdb-"
|
||||||
|
versionfile_source = "nilmdb/_version.py"
|
||||||
|
|
||||||
|
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
|
||||||
|
variables = { "refnames": git_refnames, "full": git_full }
|
||||||
|
ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
|
||||||
|
if not ver:
|
||||||
|
ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
|
||||||
|
if not ver:
|
||||||
|
ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
|
||||||
|
verbose)
|
||||||
|
if not ver:
|
||||||
|
ver = default
|
||||||
|
return ver
|
||||||
|
|
||||||
return {"version": "0+unknown", "full-revisionid": None,
|
|
||||||
"dirty": None,
|
|
||||||
"error": "unable to compute version", "date": None}
|
|
||||||
|
|
|
@ -2,29 +2,27 @@
|
||||||
|
|
||||||
"""Class for performing HTTP client requests via libcurl"""
|
"""Class for performing HTTP client requests via libcurl"""
|
||||||
|
|
||||||
import json
|
import nilmdb
|
||||||
import contextlib
|
|
||||||
|
|
||||||
import nilmdb.utils
|
import nilmdb.utils
|
||||||
import nilmdb.client.httpclient
|
import nilmdb.client.httpclient
|
||||||
from nilmdb.client.errors import ClientError
|
|
||||||
from nilmdb.utils.time import timestamp_to_string, string_to_timestamp
|
|
||||||
|
|
||||||
|
import time
|
||||||
|
import simplejson as json
|
||||||
|
import contextlib
|
||||||
|
|
||||||
|
def float_to_string(f):
|
||||||
|
"""Use repr to maintain full precision in the string output."""
|
||||||
|
return repr(float(f))
|
||||||
|
|
||||||
def extract_timestamp(line):
|
def extract_timestamp(line):
|
||||||
"""Extract just the timestamp from a line of data text"""
|
"""Extract just the timestamp from a line of data text"""
|
||||||
return string_to_timestamp(line.split()[0])
|
return float(line.split()[0])
|
||||||
|
|
||||||
|
class Client(object):
|
||||||
class Client():
|
|
||||||
"""Main client interface to the Nilm database."""
|
"""Main client interface to the Nilm database."""
|
||||||
|
|
||||||
def __init__(self, url, post_json=False):
|
def __init__(self, url):
|
||||||
"""Initialize client with given URL. If post_json is true,
|
self.http = nilmdb.client.httpclient.HTTPClient(url)
|
||||||
POST requests are sent with Content-Type 'application/json'
|
|
||||||
instead of the default 'x-www-form-urlencoded'."""
|
|
||||||
self.http = nilmdb.client.httpclient.HTTPClient(url, post_json)
|
|
||||||
self.post_json = post_json
|
|
||||||
|
|
||||||
# __enter__/__exit__ allow this class to be a context manager
|
# __enter__/__exit__ allow this class to be a context manager
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
|
@ -33,11 +31,8 @@ class Client():
|
||||||
def __exit__(self, exc_type, exc_value, traceback):
|
def __exit__(self, exc_type, exc_value, traceback):
|
||||||
self.close()
|
self.close()
|
||||||
|
|
||||||
def _json_post_param(self, data):
|
def _json_param(self, data):
|
||||||
"""Return compact json-encoded version of parameter"""
|
"""Return compact json-encoded version of parameter"""
|
||||||
if self.post_json:
|
|
||||||
# If we're posting as JSON, we don't need to encode it further here
|
|
||||||
return data
|
|
||||||
return json.dumps(data, separators=(',',':'))
|
return json.dumps(data, separators=(',',':'))
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
|
@ -57,24 +52,17 @@ class Client():
|
||||||
as a dictionary."""
|
as a dictionary."""
|
||||||
return self.http.get("dbinfo")
|
return self.http.get("dbinfo")
|
||||||
|
|
||||||
def stream_list(self, path=None, layout=None, extended=False):
|
def stream_list(self, path = None, layout = None, extent = False):
|
||||||
"""Return a sorted list of [path, layout] lists. If 'path' or
|
|
||||||
'layout' are specified, only return streams that match those
|
|
||||||
exact values. If 'extended' is True, the returned lists have
|
|
||||||
extended info, e.g.: [path, layout, extent_min, extent_max,
|
|
||||||
total_rows, total_seconds."""
|
|
||||||
params = {}
|
params = {}
|
||||||
if path is not None:
|
if path is not None:
|
||||||
params["path"] = path
|
params["path"] = path
|
||||||
if layout is not None:
|
if layout is not None:
|
||||||
params["layout"] = layout
|
params["layout"] = layout
|
||||||
if extended:
|
if extent:
|
||||||
params["extended"] = 1
|
params["extent"] = 1
|
||||||
streams = self.http.get("stream/list", params)
|
return self.http.get("stream/list", params)
|
||||||
return nilmdb.utils.sort.sort_human(streams, key=lambda s: s[0])
|
|
||||||
|
|
||||||
def stream_get_metadata(self, path, keys = None):
|
def stream_get_metadata(self, path, keys = None):
|
||||||
"""Get stream metadata"""
|
|
||||||
params = { "path": path }
|
params = { "path": path }
|
||||||
if keys is not None:
|
if keys is not None:
|
||||||
params["key"] = keys
|
params["key"] = keys
|
||||||
|
@ -85,7 +73,7 @@ class Client():
|
||||||
metadata."""
|
metadata."""
|
||||||
params = {
|
params = {
|
||||||
"path": path,
|
"path": path,
|
||||||
"data": self._json_post_param(data)
|
"data": self._json_param(data)
|
||||||
}
|
}
|
||||||
return self.http.post("stream/set_metadata", params)
|
return self.http.post("stream/set_metadata", params)
|
||||||
|
|
||||||
|
@ -93,60 +81,44 @@ class Client():
|
||||||
"""Update stream metadata from a dictionary"""
|
"""Update stream metadata from a dictionary"""
|
||||||
params = {
|
params = {
|
||||||
"path": path,
|
"path": path,
|
||||||
"data": self._json_post_param(data)
|
"data": self._json_param(data)
|
||||||
}
|
}
|
||||||
return self.http.post("stream/update_metadata", params)
|
return self.http.post("stream/update_metadata", params)
|
||||||
|
|
||||||
def stream_create(self, path, layout):
|
def stream_create(self, path, layout):
|
||||||
"""Create a new stream"""
|
"""Create a new stream"""
|
||||||
params = {
|
params = { "path": path,
|
||||||
"path": path,
|
"layout" : layout }
|
||||||
"layout": layout
|
|
||||||
}
|
|
||||||
return self.http.post("stream/create", params)
|
return self.http.post("stream/create", params)
|
||||||
|
|
||||||
def stream_destroy(self, path):
|
def stream_destroy(self, path):
|
||||||
"""Delete stream. Fails if any data is still present."""
|
"""Delete stream and its contents"""
|
||||||
params = {
|
params = { "path": path }
|
||||||
"path": path
|
|
||||||
}
|
|
||||||
return self.http.post("stream/destroy", params)
|
return self.http.post("stream/destroy", params)
|
||||||
|
|
||||||
def stream_rename(self, oldpath, newpath):
|
|
||||||
"""Rename a stream."""
|
|
||||||
params = {
|
|
||||||
"oldpath": oldpath,
|
|
||||||
"newpath": newpath
|
|
||||||
}
|
|
||||||
return self.http.post("stream/rename", params)
|
|
||||||
|
|
||||||
def stream_remove(self, path, start = None, end = None):
|
def stream_remove(self, path, start = None, end = None):
|
||||||
"""Remove data from the specified time range"""
|
"""Remove data from the specified time range"""
|
||||||
params = {
|
params = {
|
||||||
"path": path
|
"path": path
|
||||||
}
|
}
|
||||||
if start is not None:
|
if start is not None:
|
||||||
params["start"] = timestamp_to_string(start)
|
params["start"] = float_to_string(start)
|
||||||
if end is not None:
|
if end is not None:
|
||||||
params["end"] = timestamp_to_string(end)
|
params["end"] = float_to_string(end)
|
||||||
total = 0
|
return self.http.post("stream/remove", params)
|
||||||
for count in self.http.post_gen("stream/remove", params):
|
|
||||||
total += int(count)
|
|
||||||
return total
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def stream_insert_context(self, path, start = None, end = None):
|
def stream_insert_context(self, path, start = None, end = None):
|
||||||
"""Return a context manager that allows data to be efficiently
|
"""Return a context manager that allows data to be efficiently
|
||||||
inserted into a stream in a piecewise manner. Data is
|
inserted into a stream in a piecewise manner. Data is be provided
|
||||||
provided as ASCII lines, and is aggregated and sent to the
|
as single lines, and is aggregated and sent to the server in larger
|
||||||
server in larger or smaller chunks as necessary. Data lines
|
chunks as necessary. Data lines must match the database layout for
|
||||||
must match the database layout for the given path, and end
|
the given path, and end with a newline.
|
||||||
with a newline.
|
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
with client.stream_insert_context('/path', start, end) as ctx:
|
with client.stream_insert_context('/path', start, end) as ctx:
|
||||||
ctx.insert('1234567890000000 1 2 3 4\\n')
|
ctx.insert_line('1234567890.0 1 2 3 4\\n')
|
||||||
ctx.insert('1234567891000000 1 2 3 4\\n')
|
ctx.insert_line('1234567891.0 1 2 3 4\\n')
|
||||||
|
|
||||||
For more details, see help for nilmdb.client.client.StreamInserter
|
For more details, see help for nilmdb.client.client.StreamInserter
|
||||||
|
|
||||||
|
@ -156,89 +128,57 @@ class Client():
|
||||||
ctx = StreamInserter(self, path, start, end)
|
ctx = StreamInserter(self, path, start, end)
|
||||||
yield ctx
|
yield ctx
|
||||||
ctx.finalize()
|
ctx.finalize()
|
||||||
ctx.destroy()
|
|
||||||
|
|
||||||
def stream_insert(self, path, data, start = None, end = None):
|
def stream_insert(self, path, data, start = None, end = None):
|
||||||
"""Insert rows of data into a stream. data should be a string
|
"""Insert rows of data into a stream. data should be an
|
||||||
or iterable that provides ASCII data that matches the database
|
iterable object that provides ASCII data that matches the
|
||||||
layout for path. Data is passed through stream_insert_context,
|
database layout for path. See stream_insert_context for
|
||||||
so it will be broken into reasonably-sized chunks and
|
details on the 'start' and 'end' parameters."""
|
||||||
start/end will be deduced if missing."""
|
|
||||||
with self.stream_insert_context(path, start, end) as ctx:
|
with self.stream_insert_context(path, start, end) as ctx:
|
||||||
if isinstance(data, bytes):
|
ctx.insert_iter(data)
|
||||||
ctx.insert(data)
|
|
||||||
else:
|
|
||||||
for chunk in data:
|
|
||||||
ctx.insert(chunk)
|
|
||||||
return ctx.last_response
|
return ctx.last_response
|
||||||
|
|
||||||
def stream_insert_block(self, path, data, start, end, binary=False):
|
def stream_insert_block(self, path, block, start, end):
|
||||||
"""Insert a single fixed block of data into the stream. It is
|
"""Insert an entire block of data into a stream. Like
|
||||||
sent directly to the server in one block with no further
|
stream_insert, except 'block' contains multiple lines of ASCII
|
||||||
processing.
|
text and is sent in one single chunk."""
|
||||||
|
params = { "path": path,
|
||||||
|
"start": float_to_string(start),
|
||||||
|
"end": float_to_string(end) }
|
||||||
|
return self.http.put("stream/insert", block, params)
|
||||||
|
|
||||||
If 'binary' is True, provide raw binary data in little-endian
|
def stream_intervals(self, path, start = None, end = None):
|
||||||
format matching the path layout, including an int64 timestamp.
|
|
||||||
Otherwise, provide ASCII data matching the layout."""
|
|
||||||
params = {
|
|
||||||
"path": path,
|
|
||||||
"start": timestamp_to_string(start),
|
|
||||||
"end": timestamp_to_string(end),
|
|
||||||
}
|
|
||||||
if binary:
|
|
||||||
params["binary"] = 1
|
|
||||||
return self.http.put("stream/insert", data, params)
|
|
||||||
|
|
||||||
def stream_intervals(self, path, start=None, end=None, diffpath=None):
|
|
||||||
"""
|
"""
|
||||||
Return a generator that yields each stream interval.
|
Return a generator that yields each stream interval.
|
||||||
|
|
||||||
If 'diffpath' is not None, yields only interval ranges that are
|
|
||||||
present in 'path' but not in 'diffpath'.
|
|
||||||
"""
|
"""
|
||||||
params = {
|
params = {
|
||||||
"path": path
|
"path": path
|
||||||
}
|
}
|
||||||
if diffpath is not None:
|
|
||||||
params["diffpath"] = diffpath
|
|
||||||
if start is not None:
|
if start is not None:
|
||||||
params["start"] = timestamp_to_string(start)
|
params["start"] = float_to_string(start)
|
||||||
if end is not None:
|
if end is not None:
|
||||||
params["end"] = timestamp_to_string(end)
|
params["end"] = float_to_string(end)
|
||||||
return self.http.get_gen("stream/intervals", params)
|
return self.http.get_gen("stream/intervals", params)
|
||||||
|
|
||||||
def stream_extract(self, path, start=None, end=None,
|
def stream_extract(self, path, start = None, end = None, count = False):
|
||||||
count=False, markup=False, binary=False):
|
|
||||||
"""
|
"""
|
||||||
Extract data from a stream. Returns a generator that yields
|
Extract data from a stream. Returns a generator that yields
|
||||||
lines of ASCII-formatted data that matches the database
|
lines of ASCII-formatted data that matches the database
|
||||||
layout for the given path.
|
layout for the given path.
|
||||||
|
|
||||||
If 'count' is True, return a count of matching data points
|
Specify count = True to return a count of matching data points
|
||||||
rather than the actual data. The output format is unchanged.
|
rather than the actual data. The output format is unchanged.
|
||||||
|
|
||||||
If 'markup' is True, include comments in the returned data
|
|
||||||
that indicate interval starts and ends.
|
|
||||||
|
|
||||||
If 'binary' is True, return chunks of raw binary data, rather
|
|
||||||
than lines of ASCII-formatted data. Raw binary data is
|
|
||||||
little-endian and matches the database types (including an
|
|
||||||
int64 timestamp).
|
|
||||||
"""
|
"""
|
||||||
params = {
|
params = {
|
||||||
"path": path,
|
"path": path,
|
||||||
}
|
}
|
||||||
if start is not None:
|
if start is not None:
|
||||||
params["start"] = timestamp_to_string(start)
|
params["start"] = float_to_string(start)
|
||||||
if end is not None:
|
if end is not None:
|
||||||
params["end"] = timestamp_to_string(end)
|
params["end"] = float_to_string(end)
|
||||||
if count:
|
if count:
|
||||||
params["count"] = 1
|
params["count"] = 1
|
||||||
if markup:
|
return self.http.get_gen("stream/extract", params)
|
||||||
params["markup"] = 1
|
|
||||||
if binary:
|
|
||||||
params["binary"] = 1
|
|
||||||
return self.http.get_gen("stream/extract", params, binary=binary)
|
|
||||||
|
|
||||||
def stream_count(self, path, start = None, end = None):
|
def stream_count(self, path, start = None, end = None):
|
||||||
"""
|
"""
|
||||||
|
@ -248,18 +188,15 @@ class Client():
|
||||||
counts = list(self.stream_extract(path, start, end, count = True))
|
counts = list(self.stream_extract(path, start, end, count = True))
|
||||||
return int(counts[0])
|
return int(counts[0])
|
||||||
|
|
||||||
|
class StreamInserter(object):
|
||||||
class StreamInserter():
|
|
||||||
"""Object returned by stream_insert_context() that manages
|
"""Object returned by stream_insert_context() that manages
|
||||||
the insertion of rows of data into a particular path.
|
the insertion of rows of data into a particular path.
|
||||||
|
|
||||||
The basic data flow is that we are filling a contiguous interval
|
The basic data flow is that we are filling a contiguous interval
|
||||||
on the server, with no gaps, that extends from timestamp 'start'
|
on the server, with no gaps, that extends from timestamp 'start'
|
||||||
to timestamp 'end'. Data timestamps satisfy 'start <= t < end'.
|
to timestamp 'end'. Data timestamps satisfy 'start <= t < end'.
|
||||||
|
Data is provided by the user one line at a time with
|
||||||
Data is provided to .insert() as ASCII formatted data separated by
|
.insert_line() or .insert_iter().
|
||||||
newlines. The chunks of data passed to .insert() do not need to
|
|
||||||
match up with the newlines; less or more than one line can be passed.
|
|
||||||
|
|
||||||
1. The first inserted line begins a new interval that starts at
|
1. The first inserted line begins a new interval that starts at
|
||||||
'start'. If 'start' is not given, it is deduced from the first
|
'start'. If 'start' is not given, it is deduced from the first
|
||||||
|
@ -272,9 +209,7 @@ class StreamInserter():
|
||||||
3. The current contiguous interval can be completed by manually
|
3. The current contiguous interval can be completed by manually
|
||||||
calling .finalize(), which the context manager will also do
|
calling .finalize(), which the context manager will also do
|
||||||
automatically. This will send any remaining data to the server,
|
automatically. This will send any remaining data to the server,
|
||||||
using the 'end' timestamp to end the interval. If no 'end'
|
using the 'end' timestamp to end the interval.
|
||||||
was provided, it is deduced from the last timestamp seen,
|
|
||||||
plus a small delta.
|
|
||||||
|
|
||||||
After a .finalize(), inserting new data goes back to step 1.
|
After a .finalize(), inserting new data goes back to step 1.
|
||||||
|
|
||||||
|
@ -283,15 +218,20 @@ class StreamInserter():
|
||||||
to change the end time for the interval.
|
to change the end time for the interval.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# See design.md for a discussion of how much data to send. This
|
# See design.md for a discussion of how much data to send.
|
||||||
# is a soft limit -- we might send up to twice as much or so
|
# These are soft limits -- actual data might be rounded up.
|
||||||
|
# We send when we have a certain amount of data queued, or
|
||||||
|
# when a certain amount of time has passed since the last send.
|
||||||
_max_data = 2 * 1024 * 1024
|
_max_data = 2 * 1024 * 1024
|
||||||
_max_data_after_send = 64 * 1024
|
_max_time = 30
|
||||||
|
|
||||||
def __init__(self, client, path, start, end):
|
# Delta to add to the final timestamp, if "end" wasn't given
|
||||||
"""'client' is the client object. 'path' is the database
|
_end_epsilon = 1e-6
|
||||||
|
|
||||||
|
def __init__(self, client, path, start = None, end = None):
|
||||||
|
"""'http' is the httpclient object. 'path' is the database
|
||||||
path to insert to. 'start' and 'end' are used for the first
|
path to insert to. 'start' and 'end' are used for the first
|
||||||
contiguous interval and may be None."""
|
contiguous interval."""
|
||||||
self.last_response = None
|
self.last_response = None
|
||||||
|
|
||||||
self._client = client
|
self._client = client
|
||||||
|
@ -302,46 +242,60 @@ class StreamInserter():
|
||||||
self._interval_start = start
|
self._interval_start = start
|
||||||
self._interval_end = end
|
self._interval_end = end
|
||||||
|
|
||||||
# Current data we're building up to send. Each string
|
# Data for the specific block we're building up to send
|
||||||
# goes into the array, and gets joined all at once.
|
|
||||||
self._block_data = []
|
self._block_data = []
|
||||||
self._block_len = 0
|
self._block_len = 0
|
||||||
|
self._block_start = None
|
||||||
|
|
||||||
self.destroyed = False
|
# Time of last request
|
||||||
|
self._last_time = time.time()
|
||||||
|
|
||||||
def destroy(self):
|
# We keep a buffer of the two most recently inserted lines.
|
||||||
"""Ensure this object can't be used again without raising
|
# Only the older one actually gets processed; the newer one
|
||||||
an error"""
|
# is used to "look-ahead" to the next timestamp if we need
|
||||||
def error(*args, **kwargs):
|
# to internally split an insertion into two requests.
|
||||||
raise Exception("don't reuse this context object")
|
self._line_old = None
|
||||||
self._send_block = self.insert = self.finalize = self.send = error
|
self._line_new = None
|
||||||
|
|
||||||
def insert(self, data):
|
def insert_iter(self, iter):
|
||||||
"""Insert a chunk of ASCII formatted data in string form. The
|
"""Insert all lines of ASCII formatted data from the given
|
||||||
overall data must consist of lines terminated by '\\n'."""
|
iterable. Lines must be terminated with '\\n'."""
|
||||||
length = len(data)
|
for line in iter:
|
||||||
maxdata = self._max_data
|
self.insert_line(line)
|
||||||
|
|
||||||
if length > maxdata:
|
def insert_line(self, line, allow_intermediate = True):
|
||||||
# This could make our buffer more than twice what we
|
"""Insert a single line of ASCII formatted data. Line
|
||||||
# wanted to send, so split it up. This is a bit
|
must be terminated with '\\n'."""
|
||||||
# inefficient, but the user really shouldn't be providing
|
if line and (len(line) < 1 or line[-1] != '\n'):
|
||||||
# this much data at once.
|
raise ValueError("lines must end in with a newline character")
|
||||||
for cut in range(0, length, maxdata):
|
|
||||||
self.insert(data[cut:(cut + maxdata)])
|
# Store this new line, but process the previous (old) one.
|
||||||
|
# This lets us "look ahead" to the next line.
|
||||||
|
self._line_old = self._line_new
|
||||||
|
self._line_new = line
|
||||||
|
if self._line_old is None:
|
||||||
return
|
return
|
||||||
|
|
||||||
# Append this string to our list
|
# If starting a new block, pull out the timestamp if needed.
|
||||||
self._block_data.append(data)
|
if self._block_start is None:
|
||||||
self._block_len += length
|
if self._interval_start is not None:
|
||||||
|
# User provided a start timestamp. Use it once, then
|
||||||
|
# clear it for the next block.
|
||||||
|
self._block_start = self._interval_start
|
||||||
|
self._interval_start = None
|
||||||
|
else:
|
||||||
|
# Extract timestamp from the first row
|
||||||
|
self._block_start = extract_timestamp(self._line_old)
|
||||||
|
|
||||||
# Send the block once we have enough data
|
# Save the line
|
||||||
if self._block_len >= maxdata:
|
self._block_data.append(self._line_old)
|
||||||
self._send_block(final=False)
|
self._block_len += len(self._line_old)
|
||||||
if self._block_len >= self._max_data_after_send:
|
|
||||||
raise ValueError("too much data left over after trying"
|
if allow_intermediate:
|
||||||
" to send intermediate block; is it"
|
# Send an intermediate block to the server if needed.
|
||||||
" missing newlines or malformed?")
|
elapsed = time.time() - self._last_time
|
||||||
|
if (self._block_len > self._max_data) or (elapsed > self._max_time):
|
||||||
|
self._send_block_intermediate()
|
||||||
|
|
||||||
def update_start(self, start):
|
def update_start(self, start):
|
||||||
"""Update the start time for the next contiguous interval.
|
"""Update the start time for the next contiguous interval.
|
||||||
|
@ -364,114 +318,63 @@ class StreamInserter():
|
||||||
|
|
||||||
If more data is inserted after a finalize(), it will become
|
If more data is inserted after a finalize(), it will become
|
||||||
part of a new interval and there may be a gap left in-between."""
|
part of a new interval and there may be a gap left in-between."""
|
||||||
self._send_block(final=True)
|
# Special marker tells insert_line that this is the end
|
||||||
|
self.insert_line(None, allow_intermediate = False)
|
||||||
|
|
||||||
def send(self):
|
if self._block_len > 0:
|
||||||
"""Send any data that we might have buffered up. Does not affect
|
# We have data pending, so send the final block
|
||||||
any other treatment of timestamps or endpoints."""
|
self._send_block_final()
|
||||||
self._send_block(final=False)
|
elif None not in (self._interval_start, self._interval_end):
|
||||||
|
# We have no data, but enough information to create an
|
||||||
|
# empty interval.
|
||||||
|
self._block_start = self._interval_start
|
||||||
|
self._interval_start = None
|
||||||
|
self._send_block_final()
|
||||||
|
else:
|
||||||
|
# No data, and no timestamps to use to create an empty
|
||||||
|
# interval.
|
||||||
|
pass
|
||||||
|
|
||||||
def _get_first_noncomment(self, block):
|
# Make sure both timestamps are emptied for future intervals.
|
||||||
"""Return the (start, end) indices of the first full line in
|
|
||||||
block that isn't a comment, or raise IndexError if
|
|
||||||
there isn't one."""
|
|
||||||
start = 0
|
|
||||||
while True:
|
|
||||||
end = block.find(b'\n', start)
|
|
||||||
if end < 0:
|
|
||||||
raise IndexError
|
|
||||||
if block[start] != b'#'[0]:
|
|
||||||
return (start, (end + 1))
|
|
||||||
start = end + 1
|
|
||||||
|
|
||||||
def _get_last_noncomment(self, block):
|
|
||||||
"""Return the (start, end) indices of the last full line in
|
|
||||||
block[:length] that isn't a comment, or raise IndexError if
|
|
||||||
there isn't one."""
|
|
||||||
end = block.rfind(b'\n')
|
|
||||||
if end <= 0:
|
|
||||||
raise IndexError
|
|
||||||
while True:
|
|
||||||
start = block.rfind(b'\n', 0, end)
|
|
||||||
if block[start + 1] != b'#'[0]:
|
|
||||||
return ((start + 1), end)
|
|
||||||
if start == -1:
|
|
||||||
raise IndexError
|
|
||||||
end = start
|
|
||||||
|
|
||||||
def _send_block(self, final=False):
|
|
||||||
"""Send data currently in the block. The data sent will
|
|
||||||
consist of full lines only, so some might be left over."""
|
|
||||||
# Build the full string to send
|
|
||||||
block = b"".join(self._block_data)
|
|
||||||
|
|
||||||
start_ts = self._interval_start
|
|
||||||
if start_ts is None:
|
|
||||||
# Pull start from the first line
|
|
||||||
try:
|
|
||||||
(spos, epos) = self._get_first_noncomment(block)
|
|
||||||
start_ts = extract_timestamp(block[spos:epos])
|
|
||||||
except (ValueError, IndexError):
|
|
||||||
pass # no timestamp is OK, if we have no data
|
|
||||||
|
|
||||||
if final:
|
|
||||||
# For a final block, it must end in a newline, and the
|
|
||||||
# ending timestamp is either the user-provided end,
|
|
||||||
# or the timestamp of the last line plus epsilon.
|
|
||||||
end_ts = self._interval_end
|
|
||||||
try:
|
|
||||||
if block[-1] != b'\n'[0]:
|
|
||||||
raise ValueError("final block didn't end with a newline")
|
|
||||||
if end_ts is None:
|
|
||||||
(spos, epos) = self._get_last_noncomment(block)
|
|
||||||
end_ts = extract_timestamp(block[spos:epos])
|
|
||||||
end_ts += nilmdb.utils.time.epsilon
|
|
||||||
except (ValueError, IndexError):
|
|
||||||
pass # no timestamp is OK, if we have no data
|
|
||||||
self._block_data = []
|
|
||||||
self._block_len = 0
|
|
||||||
|
|
||||||
# Next block is completely fresh
|
|
||||||
self._interval_start = None
|
self._interval_start = None
|
||||||
self._interval_end = None
|
self._interval_end = None
|
||||||
|
|
||||||
|
def _send_block_intermediate(self):
|
||||||
|
"""Send data, when we still have more data to send.
|
||||||
|
Use the timestamp from the next line, so that the blocks
|
||||||
|
are contiguous."""
|
||||||
|
block_end = extract_timestamp(self._line_new)
|
||||||
|
if self._interval_end is not None and block_end > self._interval_end:
|
||||||
|
# Something's fishy -- the timestamp we found is after
|
||||||
|
# the user's specified end. Limit it here, and the
|
||||||
|
# server will return an error.
|
||||||
|
block_end = self._interval_end
|
||||||
|
self._send_block(block_end)
|
||||||
|
|
||||||
|
def _send_block_final(self):
|
||||||
|
"""Send data, when this is the last block for the interval.
|
||||||
|
There is no next line, so figure out the actual interval end
|
||||||
|
using interval_end or end_epsilon."""
|
||||||
|
if self._interval_end is not None:
|
||||||
|
# Use the user's specified end timestamp
|
||||||
|
block_end = self._interval_end
|
||||||
|
# Clear it in case we send more intervals in the future.
|
||||||
|
self._interval_end = None
|
||||||
else:
|
else:
|
||||||
# An intermediate block, e.g. "line1\nline2\nline3\nline4"
|
# Add an epsilon to the last timestamp we saw
|
||||||
# We need to save "line3\nline4" for the next block, and
|
block_end = extract_timestamp(self._line_old) + self._end_epsilon
|
||||||
# use the timestamp from "line3" as the ending timestamp
|
self._send_block(block_end)
|
||||||
# for this one.
|
|
||||||
try:
|
|
||||||
(spos, epos) = self._get_last_noncomment(block)
|
|
||||||
end_ts = extract_timestamp(block[spos:epos])
|
|
||||||
except (ValueError, IndexError):
|
|
||||||
# If we found no timestamp, give up; we could send this
|
|
||||||
# block later when we have more data.
|
|
||||||
return
|
|
||||||
if spos == 0:
|
|
||||||
# Not enough data to send an intermediate block
|
|
||||||
return
|
|
||||||
if self._interval_end is not None and end_ts > self._interval_end:
|
|
||||||
# User gave us bad endpoints; send it anyway, and let
|
|
||||||
# the server complain so that the error is the same
|
|
||||||
# as if we hadn't done this chunking.
|
|
||||||
end_ts = self._interval_end
|
|
||||||
self._block_data = [block[spos:]]
|
|
||||||
self._block_len = (epos - spos)
|
|
||||||
block = block[:spos]
|
|
||||||
|
|
||||||
# Next block continues where this one ended
|
def _send_block(self, block_end):
|
||||||
self._interval_start = end_ts
|
"""Send current block to the server"""
|
||||||
|
|
||||||
# Double check endpoints
|
|
||||||
if (start_ts is None or end_ts is None) or (start_ts == end_ts):
|
|
||||||
# If the block has no non-comment lines, it's OK
|
|
||||||
try:
|
|
||||||
self._get_first_noncomment(block)
|
|
||||||
except IndexError:
|
|
||||||
return
|
|
||||||
raise ClientError("have data to send, but no start/end times")
|
|
||||||
|
|
||||||
# Send it
|
|
||||||
self.last_response = self._client.stream_insert_block(
|
self.last_response = self._client.stream_insert_block(
|
||||||
self._path, block, start_ts, end_ts, binary=False)
|
self._path, "".join(self._block_data),
|
||||||
|
self._block_start, block_end)
|
||||||
|
|
||||||
return
|
# Clear out the block
|
||||||
|
self._block_data = []
|
||||||
|
self._block_len = 0
|
||||||
|
self._block_start = None
|
||||||
|
|
||||||
|
# Note when we sent it
|
||||||
|
self._last_time = time.time()
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
"""HTTP client errors"""
|
"""HTTP client errors"""
|
||||||
|
|
||||||
from nilmdb.utils.printf import sprintf
|
from nilmdb.utils.printf import *
|
||||||
|
|
||||||
|
|
||||||
class Error(Exception):
|
class Error(Exception):
|
||||||
"""Base exception for both ClientError and ServerError responses"""
|
"""Base exception for both ClientError and ServerError responses"""
|
||||||
|
@ -10,32 +9,25 @@ class Error(Exception):
|
||||||
message = None,
|
message = None,
|
||||||
url = None,
|
url = None,
|
||||||
traceback = None):
|
traceback = None):
|
||||||
super().__init__(status)
|
Exception.__init__(self, status)
|
||||||
self.status = status # e.g. "400 Bad Request"
|
self.status = status # e.g. "400 Bad Request"
|
||||||
self.message = message # textual message from the server
|
self.message = message # textual message from the server
|
||||||
self.url = url # URL we were requesting
|
self.url = url # URL we were requesting
|
||||||
self.traceback = traceback # server traceback, if available
|
self.traceback = traceback # server traceback, if available
|
||||||
|
|
||||||
def _format_error(self, show_url):
|
def _format_error(self, show_url):
|
||||||
s = sprintf("[%s]", self.status)
|
s = sprintf("[%s]", self.status)
|
||||||
if self.message:
|
if self.message:
|
||||||
s += sprintf(" %s", self.message)
|
s += sprintf(" %s", self.message)
|
||||||
if show_url and self.url:
|
if show_url and self.url: # pragma: no cover
|
||||||
s += sprintf(" (%s)", self.url)
|
s += sprintf(" (%s)", self.url)
|
||||||
if self.traceback:
|
if self.traceback: # pragma: no cover
|
||||||
s += sprintf("\nServer traceback:\n%s", self.traceback)
|
s += sprintf("\nServer traceback:\n%s", self.traceback)
|
||||||
return s
|
return s
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self._format_error(show_url = False)
|
return self._format_error(show_url = False)
|
||||||
|
def __repr__(self): # pragma: no cover
|
||||||
def __repr__(self):
|
|
||||||
return self._format_error(show_url = True)
|
return self._format_error(show_url = True)
|
||||||
|
|
||||||
|
|
||||||
class ClientError(Error):
|
class ClientError(Error):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class ServerError(Error):
|
class ServerError(Error):
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -1,95 +1,72 @@
|
||||||
"""HTTP client library"""
|
"""HTTP client library"""
|
||||||
|
|
||||||
import json
|
import nilmdb
|
||||||
import urllib.parse
|
import nilmdb.utils
|
||||||
import requests
|
|
||||||
|
|
||||||
from nilmdb.client.errors import ClientError, ServerError, Error
|
from nilmdb.client.errors import ClientError, ServerError, Error
|
||||||
|
|
||||||
|
import simplejson as json
|
||||||
|
import urlparse
|
||||||
|
import requests
|
||||||
|
|
||||||
class HTTPClient():
|
class HTTPClient(object):
|
||||||
"""Class to manage and perform HTTP requests from the client"""
|
"""Class to manage and perform HTTP requests from the client"""
|
||||||
def __init__(self, baseurl="", post_json=False, verify_ssl=True):
|
def __init__(self, baseurl = ""):
|
||||||
"""If baseurl is supplied, all other functions that take
|
"""If baseurl is supplied, all other functions that take
|
||||||
a URL can be given a relative URL instead."""
|
a URL can be given a relative URL instead."""
|
||||||
# Verify / clean up URL
|
# Verify / clean up URL
|
||||||
reparsed = urllib.parse.urlparse(baseurl).geturl()
|
reparsed = urlparse.urlparse(baseurl).geturl()
|
||||||
if '://' not in reparsed:
|
if '://' not in reparsed:
|
||||||
reparsed = urllib.parse.urlparse("http://" + baseurl).geturl()
|
reparsed = urlparse.urlparse("http://" + baseurl).geturl()
|
||||||
self.baseurl = reparsed.rstrip('/') + '/'
|
self.baseurl = reparsed
|
||||||
|
|
||||||
# Note whether we want SSL verification
|
# Build Requests session object, enable SSL verification
|
||||||
self.verify_ssl = verify_ssl
|
self.session = requests.Session()
|
||||||
|
self.session.verify = True
|
||||||
|
|
||||||
# Saved response, so that tests can verify a few things.
|
# Saved response, so that tests can verify a few things.
|
||||||
self._last_response = {}
|
self._last_response = {}
|
||||||
|
|
||||||
# Whether to send application/json POST bodies (versus
|
|
||||||
# x-www-form-urlencoded)
|
|
||||||
self.post_json = post_json
|
|
||||||
|
|
||||||
def _handle_error(self, url, code, body):
|
def _handle_error(self, url, code, body):
|
||||||
# Default variables for exception. We use the entire body as
|
# Default variables for exception. We use the entire body as
|
||||||
# the default message, in case we can't extract it from a JSON
|
# the default message, in case we can't extract it from a JSON
|
||||||
# response.
|
# response.
|
||||||
args = {
|
args = { "url" : url,
|
||||||
"url": url,
|
|
||||||
"status" : str(code),
|
"status" : str(code),
|
||||||
"message" : body,
|
"message" : body,
|
||||||
"traceback": None
|
"traceback" : None }
|
||||||
}
|
|
||||||
try:
|
try:
|
||||||
# Fill with server-provided data if we can
|
# Fill with server-provided data if we can
|
||||||
jsonerror = json.loads(body)
|
jsonerror = json.loads(body)
|
||||||
args["status"] = jsonerror["status"]
|
args["status"] = jsonerror["status"]
|
||||||
args["message"] = jsonerror["message"]
|
args["message"] = jsonerror["message"]
|
||||||
args["traceback"] = jsonerror["traceback"]
|
args["traceback"] = jsonerror["traceback"]
|
||||||
except Exception:
|
except Exception: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
if 400 <= code <= 499:
|
if code >= 400 and code <= 499:
|
||||||
raise ClientError(**args)
|
raise ClientError(**args)
|
||||||
else:
|
else: # pragma: no cover
|
||||||
if 500 <= code <= 599:
|
if code >= 500 and code <= 599:
|
||||||
if args["message"] is None:
|
if args["message"] is None:
|
||||||
args["message"] = ("(no message; try disabling "
|
args["message"] = ("(no message; try disabling " +
|
||||||
"response.stream option in "
|
"response.stream option in " +
|
||||||
"nilmdb.server for better debugging)")
|
"nilmdb.server for better debugging)")
|
||||||
raise ServerError(**args)
|
raise ServerError(**args)
|
||||||
else:
|
else:
|
||||||
raise Error(**args)
|
raise Error(**args)
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
pass
|
self.session.close()
|
||||||
|
|
||||||
def _do_req(self, method, url, query_data, body_data, stream, headers):
|
def _do_req(self, method, url, query_data, body_data, stream):
|
||||||
url = urllib.parse.urljoin(self.baseurl, url)
|
url = urlparse.urljoin(self.baseurl, url)
|
||||||
try:
|
try:
|
||||||
# Create a new session, ensure we send "Connection: close",
|
response = self.session.request(method, url,
|
||||||
# and explicitly close connection after the transfer.
|
|
||||||
# This is to avoid HTTP/1.1 persistent connections
|
|
||||||
# (keepalive), because they have fundamental race
|
|
||||||
# conditions when there are delays between requests:
|
|
||||||
# a new request may be sent at the same instant that the
|
|
||||||
# server decides to timeout the connection.
|
|
||||||
session = requests.Session()
|
|
||||||
if headers is None:
|
|
||||||
headers = {}
|
|
||||||
headers["Connection"] = "close"
|
|
||||||
response = session.request(method, url,
|
|
||||||
params = query_data,
|
params = query_data,
|
||||||
data = body_data,
|
data = body_data,
|
||||||
stream=stream,
|
stream = stream)
|
||||||
headers=headers,
|
|
||||||
verify=self.verify_ssl)
|
|
||||||
|
|
||||||
# Close the connection. If it's a generator (stream =
|
|
||||||
# True), the requests library shouldn't actually close the
|
|
||||||
# HTTP connection until all data has been read from the
|
|
||||||
# response.
|
|
||||||
session.close()
|
|
||||||
except requests.RequestException as e:
|
except requests.RequestException as e:
|
||||||
raise ServerError(status = "502 Error", url = url,
|
raise ServerError(status = "502 Error", url = url,
|
||||||
message=str(e))
|
message = str(e.message))
|
||||||
if response.status_code != 200:
|
if response.status_code != 200:
|
||||||
self._handle_error(url, response.status_code, response.content)
|
self._handle_error(url, response.status_code, response.content)
|
||||||
self._last_response = response
|
self._last_response = response
|
||||||
|
@ -100,16 +77,15 @@ class HTTPClient():
|
||||||
return (response, False)
|
return (response, False)
|
||||||
|
|
||||||
# Normal versions that return data directly
|
# Normal versions that return data directly
|
||||||
def _req(self, method, url, query=None, body=None, headers=None):
|
def _req(self, method, url, query = None, body = None):
|
||||||
"""
|
"""
|
||||||
Make a request and return the body data as a string or parsed
|
Make a request and return the body data as a string or parsed
|
||||||
JSON object, or raise an error if it contained an error.
|
JSON object, or raise an error if it contained an error.
|
||||||
"""
|
"""
|
||||||
(response, isjson) = self._do_req(method, url, query, body,
|
(response, isjson) = self._do_req(method, url, query, body, False)
|
||||||
stream=False, headers=headers)
|
|
||||||
if isjson:
|
if isjson:
|
||||||
return json.loads(response.content)
|
return json.loads(response.content)
|
||||||
return response.text
|
return response.content
|
||||||
|
|
||||||
def get(self, url, params = None):
|
def get(self, url, params = None):
|
||||||
"""Simple GET (parameters in URL)"""
|
"""Simple GET (parameters in URL)"""
|
||||||
|
@ -117,73 +93,29 @@ class HTTPClient():
|
||||||
|
|
||||||
def post(self, url, params = None):
|
def post(self, url, params = None):
|
||||||
"""Simple POST (parameters in body)"""
|
"""Simple POST (parameters in body)"""
|
||||||
if self.post_json:
|
|
||||||
return self._req("POST", url, None,
|
|
||||||
json.dumps(params),
|
|
||||||
{'Content-type': 'application/json'})
|
|
||||||
else:
|
|
||||||
return self._req("POST", url, None, params)
|
return self._req("POST", url, None, params)
|
||||||
|
|
||||||
def put(self, url, data, params=None,
|
def put(self, url, data, params = None):
|
||||||
content_type="application/octet-stream"):
|
|
||||||
"""Simple PUT (parameters in URL, data in body)"""
|
"""Simple PUT (parameters in URL, data in body)"""
|
||||||
h = {'Content-type': content_type}
|
return self._req("PUT", url, params, data)
|
||||||
return self._req("PUT", url, query=params, body=data, headers=h)
|
|
||||||
|
|
||||||
# Generator versions that return data one line at a time.
|
# Generator versions that return data one line at a time.
|
||||||
def _req_gen(self, method, url, query=None, body=None,
|
def _req_gen(self, method, url, query = None, body = None):
|
||||||
headers=None, binary=False):
|
|
||||||
"""
|
"""
|
||||||
Make a request and return a generator that gives back strings
|
Make a request and return a generator that gives back strings
|
||||||
or JSON decoded lines of the body data, or raise an error if
|
or JSON decoded lines of the body data, or raise an error if
|
||||||
it contained an eror.
|
it contained an eror.
|
||||||
"""
|
"""
|
||||||
(response, isjson) = self._do_req(method, url, query, body,
|
(response, isjson) = self._do_req(method, url, query, body, True)
|
||||||
stream=True, headers=headers)
|
for line in response.iter_lines():
|
||||||
|
if isjson:
|
||||||
# Like the iter_lines function in Requests, but only splits on
|
|
||||||
# the specified line ending.
|
|
||||||
def lines(source, ending):
|
|
||||||
pending = None
|
|
||||||
for chunk in source:
|
|
||||||
if pending is not None:
|
|
||||||
chunk = pending + chunk
|
|
||||||
tmp = chunk.split(ending)
|
|
||||||
lines = tmp[:-1]
|
|
||||||
if chunk.endswith(ending):
|
|
||||||
pending = None
|
|
||||||
else:
|
|
||||||
pending = tmp[-1]
|
|
||||||
for line in lines:
|
|
||||||
yield line
|
|
||||||
if pending is not None:
|
|
||||||
yield pending
|
|
||||||
|
|
||||||
# Yield the chunks or lines as requested
|
|
||||||
if binary:
|
|
||||||
for chunk in response.iter_content(chunk_size=65536):
|
|
||||||
yield chunk
|
|
||||||
elif isjson:
|
|
||||||
for line in lines(response.iter_content(chunk_size=1),
|
|
||||||
ending=b'\r\n'):
|
|
||||||
yield json.loads(line)
|
yield json.loads(line)
|
||||||
else:
|
else:
|
||||||
for line in lines(response.iter_content(chunk_size=65536),
|
|
||||||
ending=b'\n'):
|
|
||||||
yield line
|
yield line
|
||||||
|
|
||||||
def get_gen(self, url, params=None, binary=False):
|
def get_gen(self, url, params = None):
|
||||||
"""Simple GET (parameters in URL) returning a generator"""
|
"""Simple GET (parameters in URL) returning a generator"""
|
||||||
return self._req_gen("GET", url, params, binary=binary)
|
return self._req_gen("GET", url, params)
|
||||||
|
|
||||||
def post_gen(self, url, params=None):
|
|
||||||
"""Simple POST (parameters in body) returning a generator"""
|
|
||||||
if self.post_json:
|
|
||||||
return self._req_gen("POST", url, None,
|
|
||||||
json.dumps(params),
|
|
||||||
{'Content-type': 'application/json'})
|
|
||||||
else:
|
|
||||||
return self._req_gen("POST", url, None, params)
|
|
||||||
|
|
||||||
# Not much use for a POST or PUT generator, since they don't
|
# Not much use for a POST or PUT generator, since they don't
|
||||||
# return much data.
|
# return much data.
|
||||||
|
|
|
@ -1,263 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
"""Provide a NumpyClient class that is based on normal Client, but has
|
|
||||||
additional methods for extracting and inserting data via Numpy arrays."""
|
|
||||||
|
|
||||||
import contextlib
|
|
||||||
|
|
||||||
import numpy
|
|
||||||
|
|
||||||
import nilmdb.utils
|
|
||||||
import nilmdb.client.client
|
|
||||||
import nilmdb.client.httpclient
|
|
||||||
from nilmdb.client.errors import ClientError
|
|
||||||
|
|
||||||
|
|
||||||
def layout_to_dtype(layout):
|
|
||||||
ltype = layout.split('_')[0]
|
|
||||||
lcount = int(layout.split('_')[1])
|
|
||||||
if ltype.startswith('int'):
|
|
||||||
atype = '<i' + str(int(ltype[3:]) // 8)
|
|
||||||
elif ltype.startswith('uint'):
|
|
||||||
atype = '<u' + str(int(ltype[4:]) // 8)
|
|
||||||
elif ltype.startswith('float'):
|
|
||||||
atype = '<f' + str(int(ltype[5:]) // 8)
|
|
||||||
else:
|
|
||||||
raise ValueError("bad layout")
|
|
||||||
if lcount == 1:
|
|
||||||
dtype = [('timestamp', '<i8'), ('data', atype)]
|
|
||||||
else:
|
|
||||||
dtype = [('timestamp', '<i8'), ('data', atype, lcount)]
|
|
||||||
return numpy.dtype(dtype)
|
|
||||||
|
|
||||||
|
|
||||||
class NumpyClient(nilmdb.client.client.Client):
|
|
||||||
"""Subclass of nilmdb.client.Client that adds additional methods for
|
|
||||||
extracting and inserting data via Numpy arrays."""
|
|
||||||
|
|
||||||
def _get_dtype(self, path, layout):
|
|
||||||
if layout is None:
|
|
||||||
streams = self.stream_list(path)
|
|
||||||
if len(streams) != 1:
|
|
||||||
raise ClientError("can't get layout for path: " + path)
|
|
||||||
layout = streams[0][1]
|
|
||||||
return layout_to_dtype(layout)
|
|
||||||
|
|
||||||
def stream_extract_numpy(self, path, start=None, end=None,
|
|
||||||
layout=None, maxrows=100000,
|
|
||||||
structured=False):
|
|
||||||
"""
|
|
||||||
Extract data from a stream. Returns a generator that yields
|
|
||||||
Numpy arrays of up to 'maxrows' of data each.
|
|
||||||
|
|
||||||
If 'layout' is None, it is read using stream_info.
|
|
||||||
|
|
||||||
If 'structured' is False, all data is converted to float64
|
|
||||||
and returned in a flat 2D array. Otherwise, data is returned
|
|
||||||
as a structured dtype in a 1D array.
|
|
||||||
"""
|
|
||||||
dtype = self._get_dtype(path, layout)
|
|
||||||
|
|
||||||
def to_numpy(data):
|
|
||||||
a = numpy.frombuffer(data, dtype)
|
|
||||||
if structured:
|
|
||||||
return a
|
|
||||||
return numpy.c_[a['timestamp'], a['data']]
|
|
||||||
|
|
||||||
chunks = []
|
|
||||||
total_len = 0
|
|
||||||
maxsize = dtype.itemsize * maxrows
|
|
||||||
for data in self.stream_extract(path, start, end, binary=True):
|
|
||||||
# Add this block of binary data
|
|
||||||
chunks.append(data)
|
|
||||||
total_len += len(data)
|
|
||||||
|
|
||||||
# See if we have enough to make the requested Numpy array
|
|
||||||
while total_len >= maxsize:
|
|
||||||
assembled = b"".join(chunks)
|
|
||||||
total_len -= maxsize
|
|
||||||
chunks = [assembled[maxsize:]]
|
|
||||||
block = assembled[:maxsize]
|
|
||||||
yield to_numpy(block)
|
|
||||||
|
|
||||||
if total_len:
|
|
||||||
yield to_numpy(b"".join(chunks))
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def stream_insert_numpy_context(self, path, start=None, end=None,
|
|
||||||
layout=None):
|
|
||||||
"""Return a context manager that allows data to be efficiently
|
|
||||||
inserted into a stream in a piecewise manner. Data is
|
|
||||||
provided as Numpy arrays, and is aggregated and sent to the
|
|
||||||
server in larger or smaller chunks as necessary. Data format
|
|
||||||
must match the database layout for the given path.
|
|
||||||
|
|
||||||
For more details, see help for
|
|
||||||
nilmdb.client.numpyclient.StreamInserterNumpy
|
|
||||||
|
|
||||||
If 'layout' is not None, use it as the layout rather than
|
|
||||||
querying the database.
|
|
||||||
"""
|
|
||||||
dtype = self._get_dtype(path, layout)
|
|
||||||
ctx = StreamInserterNumpy(self, path, start, end, dtype)
|
|
||||||
yield ctx
|
|
||||||
ctx.finalize()
|
|
||||||
ctx.destroy()
|
|
||||||
|
|
||||||
def stream_insert_numpy(self, path, data, start=None, end=None,
|
|
||||||
layout=None):
|
|
||||||
"""Insert data into a stream. data should be a Numpy array
|
|
||||||
which will be passed through stream_insert_numpy_context to
|
|
||||||
break it into chunks etc. See the help for that function
|
|
||||||
for details."""
|
|
||||||
with self.stream_insert_numpy_context(path, start, end, layout) as ctx:
|
|
||||||
if isinstance(data, numpy.ndarray):
|
|
||||||
ctx.insert(data)
|
|
||||||
else:
|
|
||||||
for chunk in data:
|
|
||||||
ctx.insert(chunk)
|
|
||||||
return ctx.last_response
|
|
||||||
|
|
||||||
|
|
||||||
class StreamInserterNumpy(nilmdb.client.client.StreamInserter):
|
|
||||||
"""Object returned by stream_insert_numpy_context() that manages
|
|
||||||
the insertion of rows of data into a particular path.
|
|
||||||
|
|
||||||
See help for nilmdb.client.client.StreamInserter for details.
|
|
||||||
The only difference is that, instead of ASCII formatted data,
|
|
||||||
this context manager can take Numpy arrays, which are either
|
|
||||||
structured (1D with complex dtype) or flat (2D with simple dtype).
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Soft limit of how many bytes to send per HTTP request.
|
|
||||||
_max_data = 2 * 1024 * 1024
|
|
||||||
|
|
||||||
def __init__(self, client, path, start, end, dtype):
|
|
||||||
"""
|
|
||||||
'client' is the client object. 'path' is the database path
|
|
||||||
to insert to. 'start' and 'end' are used for the first
|
|
||||||
contiguous interval and may be None. 'dtype' is the Numpy
|
|
||||||
dtype for this stream.
|
|
||||||
"""
|
|
||||||
super(StreamInserterNumpy, self).__init__(client, path, start, end)
|
|
||||||
self._dtype = dtype
|
|
||||||
|
|
||||||
# Max rows to send at once
|
|
||||||
self._max_rows = self._max_data // self._dtype.itemsize
|
|
||||||
|
|
||||||
# List of the current arrays we're building up to send
|
|
||||||
self._block_arrays = []
|
|
||||||
self._block_rows = 0
|
|
||||||
|
|
||||||
def insert(self, array):
|
|
||||||
"""Insert Numpy data, which must match the layout type."""
|
|
||||||
if not isinstance(array, numpy.ndarray):
|
|
||||||
array = numpy.array(array)
|
|
||||||
if array.ndim == 1:
|
|
||||||
# Already a structured array; just verify the type
|
|
||||||
if array.dtype != self._dtype:
|
|
||||||
raise ValueError("wrong dtype for 1D (structured) array")
|
|
||||||
elif array.ndim == 2:
|
|
||||||
# Convert to structured array
|
|
||||||
sarray = numpy.zeros(array.shape[0], dtype=self._dtype)
|
|
||||||
try:
|
|
||||||
sarray['timestamp'] = array[:, 0]
|
|
||||||
# Need the squeeze in case sarray['data'] is 1 dimensional
|
|
||||||
sarray['data'] = numpy.squeeze(array[:, 1:])
|
|
||||||
except (IndexError, ValueError):
|
|
||||||
raise ValueError("wrong number of fields for this data type")
|
|
||||||
array = sarray
|
|
||||||
else:
|
|
||||||
raise ValueError("wrong number of dimensions in array")
|
|
||||||
|
|
||||||
length = len(array)
|
|
||||||
maxrows = self._max_rows
|
|
||||||
|
|
||||||
if length == 0:
|
|
||||||
return
|
|
||||||
if length > maxrows:
|
|
||||||
# This is more than twice what we wanted to send, so split
|
|
||||||
# it up. This is a bit inefficient, but the user really
|
|
||||||
# shouldn't be providing this much data at once.
|
|
||||||
for cut in range(0, length, maxrows):
|
|
||||||
self.insert(array[cut:(cut + maxrows)])
|
|
||||||
return
|
|
||||||
|
|
||||||
# Add this array to our list
|
|
||||||
self._block_arrays.append(array)
|
|
||||||
self._block_rows += length
|
|
||||||
|
|
||||||
# Send if it's too long
|
|
||||||
if self._block_rows >= maxrows:
|
|
||||||
self._send_block(final=False)
|
|
||||||
|
|
||||||
def _send_block(self, final=False):
|
|
||||||
"""Send the data current stored up. One row might be left
|
|
||||||
over if we need its timestamp saved."""
|
|
||||||
|
|
||||||
# Build the full array to send
|
|
||||||
if self._block_rows == 0:
|
|
||||||
array = numpy.zeros(0, dtype=self._dtype)
|
|
||||||
else:
|
|
||||||
array = numpy.hstack(self._block_arrays)
|
|
||||||
|
|
||||||
# Get starting timestamp
|
|
||||||
start_ts = self._interval_start
|
|
||||||
if start_ts is None:
|
|
||||||
# Pull start from the first row
|
|
||||||
try:
|
|
||||||
start_ts = array['timestamp'][0]
|
|
||||||
except IndexError:
|
|
||||||
pass # no timestamp is OK, if we have no data
|
|
||||||
|
|
||||||
# Get ending timestamp
|
|
||||||
if final:
|
|
||||||
# For a final block, the timestamp is either the
|
|
||||||
# user-provided end, or the timestamp of the last line
|
|
||||||
# plus epsilon.
|
|
||||||
end_ts = self._interval_end
|
|
||||||
if end_ts is None:
|
|
||||||
try:
|
|
||||||
end_ts = array['timestamp'][-1]
|
|
||||||
end_ts += nilmdb.utils.time.epsilon
|
|
||||||
except IndexError:
|
|
||||||
pass # no timestamp is OK, if we have no data
|
|
||||||
self._block_arrays = []
|
|
||||||
self._block_rows = 0
|
|
||||||
|
|
||||||
# Next block is completely fresh
|
|
||||||
self._interval_start = None
|
|
||||||
self._interval_end = None
|
|
||||||
else:
|
|
||||||
# An intermediate block. We need to save the last row
|
|
||||||
# for the next block, and use its timestamp as the ending
|
|
||||||
# timestamp for this one.
|
|
||||||
if len(array) < 2:
|
|
||||||
# Not enough data to send an intermediate block
|
|
||||||
return
|
|
||||||
end_ts = array['timestamp'][-1]
|
|
||||||
if self._interval_end is not None and end_ts > self._interval_end:
|
|
||||||
# User gave us bad endpoints; send it anyway, and let
|
|
||||||
# the server complain so that the error is the same
|
|
||||||
# as if we hadn't done this chunking.
|
|
||||||
end_ts = self._interval_end
|
|
||||||
self._block_arrays = [array[-1:]]
|
|
||||||
self._block_rows = 1
|
|
||||||
array = array[:-1]
|
|
||||||
|
|
||||||
# Next block continues where this one ended
|
|
||||||
self._interval_start = end_ts
|
|
||||||
|
|
||||||
# If we have no endpoints, or equal endpoints, it's OK as long
|
|
||||||
# as there's no data to send
|
|
||||||
if (start_ts is None or end_ts is None) or (start_ts == end_ts):
|
|
||||||
if not array:
|
|
||||||
return
|
|
||||||
raise ClientError("have data to send, but invalid start/end times")
|
|
||||||
|
|
||||||
# Send it
|
|
||||||
data = array.tostring()
|
|
||||||
self.last_response = self._client.stream_insert_block(
|
|
||||||
self._path, data, start_ts, end_ts, binary=True)
|
|
||||||
|
|
||||||
return
|
|
|
@ -1,109 +1,46 @@
|
||||||
"""Command line client functionality"""
|
"""Command line client functionality"""
|
||||||
|
|
||||||
import os
|
import nilmdb
|
||||||
|
from nilmdb.utils.printf import *
|
||||||
|
from nilmdb.utils import datetime_tz
|
||||||
|
import nilmdb.utils.time
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import signal
|
import os
|
||||||
import argparse
|
import argparse
|
||||||
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
||||||
|
|
||||||
import nilmdb.client
|
|
||||||
from nilmdb.utils.printf import fprintf, sprintf
|
|
||||||
import nilmdb.utils.time
|
|
||||||
|
|
||||||
import argcomplete
|
|
||||||
import datetime_tz
|
|
||||||
|
|
||||||
# Valid subcommands. Defined in separate files just to break
|
# Valid subcommands. Defined in separate files just to break
|
||||||
# things up -- they're still called with Cmdline as self.
|
# things up -- they're still called with Cmdline as self.
|
||||||
subcommands = ["help", "info", "create", "rename", "list", "intervals",
|
subcommands = [ "help", "info", "create", "list", "metadata",
|
||||||
"metadata", "insert", "extract", "remove", "destroy"]
|
"insert", "extract", "remove", "destroy" ]
|
||||||
|
|
||||||
# Import the subcommand modules
|
# Import the subcommand modules
|
||||||
subcmd_mods = {}
|
subcmd_mods = {}
|
||||||
for cmd in subcommands:
|
for cmd in subcommands:
|
||||||
subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist = [ cmd ])
|
subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist = [ cmd ])
|
||||||
|
|
||||||
|
|
||||||
class JimArgumentParser(argparse.ArgumentParser):
|
class JimArgumentParser(argparse.ArgumentParser):
|
||||||
def parse_args(self, args=None, namespace=None):
|
|
||||||
# Look for --version anywhere and change it to just "nilmtool
|
|
||||||
# --version". This makes "nilmtool cmd --version" work, which
|
|
||||||
# is needed by help2man.
|
|
||||||
if "--version" in (args or sys.argv[1:]):
|
|
||||||
args = ["--version"]
|
|
||||||
return argparse.ArgumentParser.parse_args(self, args, namespace)
|
|
||||||
|
|
||||||
def error(self, message):
|
def error(self, message):
|
||||||
self.print_usage(sys.stderr)
|
self.print_usage(sys.stderr)
|
||||||
self.exit(2, sprintf("error: %s\n", message))
|
self.exit(2, sprintf("error: %s\n", message))
|
||||||
|
|
||||||
|
class Cmdline(object):
|
||||||
class Complete():
|
|
||||||
# Completion helpers, for using argcomplete (see
|
|
||||||
# extras/nilmtool-bash-completion.sh)
|
|
||||||
def escape(self, s):
|
|
||||||
quote_chars = ["\\", "\"", "'", " "]
|
|
||||||
for char in quote_chars:
|
|
||||||
s = s.replace(char, "\\" + char)
|
|
||||||
return s
|
|
||||||
|
|
||||||
def none(self, prefix, parsed_args, **kwargs):
|
|
||||||
return []
|
|
||||||
rate = none
|
|
||||||
time = none
|
|
||||||
url = none
|
|
||||||
|
|
||||||
def path(self, prefix, parsed_args, **kwargs):
|
|
||||||
client = nilmdb.client.Client(parsed_args.url)
|
|
||||||
return (self.escape(s[0])
|
|
||||||
for s in client.stream_list()
|
|
||||||
if s[0].startswith(prefix))
|
|
||||||
|
|
||||||
def layout(self, prefix, parsed_args, **kwargs):
|
|
||||||
types = ["int8", "int16", "int32", "int64",
|
|
||||||
"uint8", "uint16", "uint32", "uint64",
|
|
||||||
"float32", "float64"]
|
|
||||||
layouts = []
|
|
||||||
for i in range(1, 10):
|
|
||||||
layouts.extend([(t + "_" + str(i)) for t in types])
|
|
||||||
return (lay for lay in layouts if lay.startswith(prefix))
|
|
||||||
|
|
||||||
def meta_key(self, prefix, parsed_args, **kwargs):
|
|
||||||
return (kv.split('=')[0] for kv
|
|
||||||
in self.meta_keyval(prefix, parsed_args, **kwargs))
|
|
||||||
|
|
||||||
def meta_keyval(self, prefix, parsed_args, **kwargs):
|
|
||||||
client = nilmdb.client.Client(parsed_args.url)
|
|
||||||
path = parsed_args.path
|
|
||||||
if not path:
|
|
||||||
return []
|
|
||||||
results = []
|
|
||||||
for (k, v) in client.stream_get_metadata(path).items():
|
|
||||||
kv = self.escape(k + '=' + v)
|
|
||||||
if kv.startswith(prefix):
|
|
||||||
results.append(kv)
|
|
||||||
return results
|
|
||||||
|
|
||||||
|
|
||||||
class Cmdline():
|
|
||||||
|
|
||||||
def __init__(self, argv = None):
|
def __init__(self, argv = None):
|
||||||
self.argv = argv or sys.argv[1:]
|
self.argv = argv or sys.argv[1:]
|
||||||
self.client = None
|
self.client = None
|
||||||
self.def_url = os.environ.get("NILMDB_URL", "http://localhost/nilmdb/")
|
self.def_url = os.environ.get("NILMDB_URL", "http://localhost:12380")
|
||||||
self.subcmd = {}
|
self.subcmd = {}
|
||||||
self.complete = Complete()
|
|
||||||
self.complete_output_stream = None # overridden by test suite
|
|
||||||
|
|
||||||
def arg_time(self, toparse):
|
def arg_time(self, toparse):
|
||||||
"""Parse a time string argument"""
|
"""Parse a time string argument"""
|
||||||
try:
|
try:
|
||||||
return nilmdb.utils.time.parse_time(toparse)
|
return nilmdb.utils.time.parse_time(toparse).totimestamp()
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise argparse.ArgumentTypeError(sprintf("%s \"%s\"",
|
raise argparse.ArgumentTypeError(sprintf("%s \"%s\"",
|
||||||
str(e), toparse))
|
str(e), toparse))
|
||||||
|
|
||||||
# Set up the parser
|
|
||||||
def parser_setup(self):
|
def parser_setup(self):
|
||||||
self.parser = JimArgumentParser(add_help = False,
|
self.parser = JimArgumentParser(add_help = False,
|
||||||
formatter_class = def_form)
|
formatter_class = def_form)
|
||||||
|
@ -111,17 +48,16 @@ class Cmdline():
|
||||||
group = self.parser.add_argument_group("General options")
|
group = self.parser.add_argument_group("General options")
|
||||||
group.add_argument("-h", "--help", action='help',
|
group.add_argument("-h", "--help", action='help',
|
||||||
help='show this help message and exit')
|
help='show this help message and exit')
|
||||||
group.add_argument("-v", "--version", action="version",
|
group.add_argument("-V", "--version", action="version",
|
||||||
version = nilmdb.__version__)
|
version = nilmdb.__version__)
|
||||||
|
|
||||||
group = self.parser.add_argument_group("Server")
|
group = self.parser.add_argument_group("Server")
|
||||||
group.add_argument("-u", "--url", action="store",
|
group.add_argument("-u", "--url", action="store",
|
||||||
default=self.def_url,
|
default=self.def_url,
|
||||||
help="NilmDB server URL (default: %(default)s)"
|
help="NilmDB server URL (default: %(default)s)")
|
||||||
).completer = self.complete.url
|
|
||||||
|
|
||||||
sub = self.parser.add_subparsers(
|
sub = self.parser.add_subparsers(
|
||||||
title="Commands", dest="command", required=True,
|
title="Commands", dest="command",
|
||||||
description="Use 'help command' or 'command --help' for more "
|
description="Use 'help command' or 'command --help' for more "
|
||||||
"details on a particular command.")
|
"details on a particular command.")
|
||||||
|
|
||||||
|
@ -136,31 +72,25 @@ class Cmdline():
|
||||||
sys.exit(-1)
|
sys.exit(-1)
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
# Set SIGPIPE to its default handler -- we don't need Python
|
|
||||||
# to catch it for us.
|
|
||||||
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
|
|
||||||
|
|
||||||
# Clear cached timezone, so that we can pick up timezone changes
|
# Clear cached timezone, so that we can pick up timezone changes
|
||||||
# while running this from the test suite.
|
# while running this from the test suite.
|
||||||
datetime_tz._localtz = None
|
datetime_tz._localtz = None
|
||||||
|
|
||||||
# Run parser
|
# Run parser
|
||||||
self.parser_setup()
|
self.parser_setup()
|
||||||
argcomplete.autocomplete(self.parser, exit_method=sys.exit,
|
|
||||||
output_stream=self.complete_output_stream)
|
|
||||||
self.args = self.parser.parse_args(self.argv)
|
self.args = self.parser.parse_args(self.argv)
|
||||||
|
|
||||||
# Run arg verify handler if there is one
|
# Run arg verify handler if there is one
|
||||||
if "verify" in self.args:
|
if "verify" in self.args:
|
||||||
self.args.verify(self)
|
self.args.verify(self)
|
||||||
|
|
||||||
self.client = nilmdb.client.Client(self.args.url)
|
self.client = nilmdb.Client(self.args.url)
|
||||||
|
|
||||||
# Make a test connection to make sure things work,
|
# Make a test connection to make sure things work,
|
||||||
# unless the particular command requests that we don't.
|
# unless the particular command requests that we don't.
|
||||||
if "no_test_connect" not in self.args:
|
if "no_test_connect" not in self.args:
|
||||||
try:
|
try:
|
||||||
self.client.version()
|
server_version = self.client.version()
|
||||||
except nilmdb.client.Error as e:
|
except nilmdb.client.Error as e:
|
||||||
self.die("error connecting to server: %s", str(e))
|
self.die("error connecting to server: %s", str(e))
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
from argparse import RawDescriptionHelpFormatter as raw_form
|
from nilmdb.utils.printf import *
|
||||||
|
import nilmdb
|
||||||
import nilmdb.client
|
import nilmdb.client
|
||||||
|
|
||||||
|
from argparse import RawDescriptionHelpFormatter as raw_form
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("create", help="Create a new stream",
|
cmd = sub.add_parser("create", help="Create a new stream",
|
||||||
|
@ -22,14 +23,11 @@ Layout types are of the format: type_count
|
||||||
cmd.set_defaults(handler = cmd_create)
|
cmd.set_defaults(handler = cmd_create)
|
||||||
group = cmd.add_argument_group("Required arguments")
|
group = cmd.add_argument_group("Required arguments")
|
||||||
group.add_argument("path",
|
group.add_argument("path",
|
||||||
help="Path (in database) of new stream, e.g. /foo/bar",
|
help="Path (in database) of new stream, e.g. /foo/bar")
|
||||||
).completer = self.complete.path
|
|
||||||
group.add_argument("layout",
|
group.add_argument("layout",
|
||||||
help="Layout type for new stream, e.g. float32_8",
|
help="Layout type for new stream, e.g. float32_8")
|
||||||
).completer = self.complete.layout
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_create(self):
|
def cmd_create(self):
|
||||||
"""Create new stream"""
|
"""Create new stream"""
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -1,52 +1,26 @@
|
||||||
import fnmatch
|
from nilmdb.utils.printf import *
|
||||||
|
import nilmdb
|
||||||
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
|
||||||
|
|
||||||
from nilmdb.utils.printf import printf
|
|
||||||
import nilmdb.client
|
import nilmdb.client
|
||||||
|
|
||||||
|
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("destroy", help="Delete a stream and all data",
|
cmd = sub.add_parser("destroy", help="Delete a stream and all data",
|
||||||
formatter_class = def_form,
|
formatter_class = def_form,
|
||||||
description="""
|
description="""
|
||||||
Destroy the stream at the specified path.
|
Destroy the stream at the specified path. All
|
||||||
The stream must be empty. All metadata
|
data and metadata related to the stream is
|
||||||
related to the stream is permanently deleted.
|
permanently deleted.
|
||||||
|
|
||||||
Wildcards and multiple paths are supported.
|
|
||||||
""")
|
""")
|
||||||
cmd.set_defaults(handler = cmd_destroy)
|
cmd.set_defaults(handler = cmd_destroy)
|
||||||
group = cmd.add_argument_group("Options")
|
|
||||||
group.add_argument("-R", "--remove", action="store_true",
|
|
||||||
help="Remove all data before destroying stream")
|
|
||||||
group.add_argument("-q", "--quiet", action="store_true",
|
|
||||||
help="Don't display names when destroying "
|
|
||||||
"multiple paths")
|
|
||||||
group = cmd.add_argument_group("Required arguments")
|
group = cmd.add_argument_group("Required arguments")
|
||||||
group.add_argument("path", nargs='+',
|
group.add_argument("path",
|
||||||
help="Path of the stream to delete, e.g. /foo/bar/*",
|
help="Path of the stream to delete, e.g. /foo/bar")
|
||||||
).completer = self.complete.path
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_destroy(self):
|
def cmd_destroy(self):
|
||||||
"""Destroy stream"""
|
"""Destroy stream"""
|
||||||
streams = [s[0] for s in self.client.stream_list()]
|
|
||||||
paths = []
|
|
||||||
for path in self.args.path:
|
|
||||||
new = fnmatch.filter(streams, path)
|
|
||||||
if not new:
|
|
||||||
self.die("error: no stream matched path: %s", path)
|
|
||||||
paths.extend(new)
|
|
||||||
|
|
||||||
for path in paths:
|
|
||||||
if not self.args.quiet and len(paths) > 1:
|
|
||||||
printf("Destroying %s\n", path)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if self.args.remove:
|
self.client.stream_destroy(self.args.path)
|
||||||
self.client.stream_remove(path)
|
|
||||||
self.client.stream_destroy(path)
|
|
||||||
except nilmdb.client.ClientError as e:
|
except nilmdb.client.ClientError as e:
|
||||||
self.die("error destroying stream: %s", str(e))
|
self.die("error destroying stream: %s", str(e))
|
||||||
|
|
|
@ -1,9 +1,7 @@
|
||||||
import sys
|
from __future__ import print_function
|
||||||
|
from nilmdb.utils.printf import *
|
||||||
from nilmdb.utils.printf import printf
|
|
||||||
import nilmdb.client
|
import nilmdb.client
|
||||||
|
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("extract", help="Extract data",
|
cmd = sub.add_parser("extract", help="Extract data",
|
||||||
description="""
|
description="""
|
||||||
|
@ -14,44 +12,31 @@ def setup(self, sub):
|
||||||
|
|
||||||
group = cmd.add_argument_group("Data selection")
|
group = cmd.add_argument_group("Data selection")
|
||||||
group.add_argument("path",
|
group.add_argument("path",
|
||||||
help="Path of stream, e.g. /foo/bar",
|
help="Path of stream, e.g. /foo/bar")
|
||||||
).completer = self.complete.path
|
|
||||||
group.add_argument("-s", "--start", required=True,
|
group.add_argument("-s", "--start", required=True,
|
||||||
metavar="TIME", type=self.arg_time,
|
metavar="TIME", type=self.arg_time,
|
||||||
help="Starting timestamp (free-form, inclusive)",
|
help="Starting timestamp (free-form, inclusive)")
|
||||||
).completer = self.complete.time
|
|
||||||
group.add_argument("-e", "--end", required=True,
|
group.add_argument("-e", "--end", required=True,
|
||||||
metavar="TIME", type=self.arg_time,
|
metavar="TIME", type=self.arg_time,
|
||||||
help="Ending timestamp (free-form, noninclusive)",
|
help="Ending timestamp (free-form, noninclusive)")
|
||||||
).completer = self.complete.time
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Output format")
|
group = cmd.add_argument_group("Output format")
|
||||||
group.add_argument("-B", "--binary", action="store_true",
|
|
||||||
help="Raw binary output")
|
|
||||||
group.add_argument("-b", "--bare", action="store_true",
|
group.add_argument("-b", "--bare", action="store_true",
|
||||||
help="Exclude timestamps from output lines")
|
help="Exclude timestamps from output lines")
|
||||||
group.add_argument("-a", "--annotate", action="store_true",
|
group.add_argument("-a", "--annotate", action="store_true",
|
||||||
help="Include comments with some information "
|
help="Include comments with some information "
|
||||||
"about the stream")
|
"about the stream")
|
||||||
group.add_argument("-m", "--markup", action="store_true",
|
|
||||||
help="Include comments with interval starts and ends")
|
|
||||||
group.add_argument("-T", "--timestamp-raw", action="store_true",
|
group.add_argument("-T", "--timestamp-raw", action="store_true",
|
||||||
help="Show raw timestamps in annotated information")
|
help="Show raw timestamps in annotated information")
|
||||||
group.add_argument("-c", "--count", action="store_true",
|
group.add_argument("-c", "--count", action="store_true",
|
||||||
help="Just output a count of matched data points")
|
help="Just output a count of matched data points")
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_extract_verify(self):
|
def cmd_extract_verify(self):
|
||||||
|
if self.args.start is not None and self.args.end is not None:
|
||||||
if self.args.start > self.args.end:
|
if self.args.start > self.args.end:
|
||||||
self.parser.error("start is after end")
|
self.parser.error("start is after end")
|
||||||
|
|
||||||
if self.args.binary:
|
|
||||||
if (self.args.bare or self.args.annotate or self.args.markup or
|
|
||||||
self.args.timestamp_raw or self.args.count):
|
|
||||||
self.parser.error("--binary cannot be combined with other options")
|
|
||||||
|
|
||||||
|
|
||||||
def cmd_extract(self):
|
def cmd_extract(self):
|
||||||
streams = self.client.stream_list(self.args.path)
|
streams = self.client.stream_list(self.args.path)
|
||||||
if len(streams) != 1:
|
if len(streams) != 1:
|
||||||
|
@ -59,9 +44,9 @@ def cmd_extract(self):
|
||||||
layout = streams[0][1]
|
layout = streams[0][1]
|
||||||
|
|
||||||
if self.args.timestamp_raw:
|
if self.args.timestamp_raw:
|
||||||
time_string = nilmdb.utils.time.timestamp_to_string
|
time_string = repr
|
||||||
else:
|
else:
|
||||||
time_string = nilmdb.utils.time.timestamp_to_human
|
time_string = nilmdb.utils.time.format_time
|
||||||
|
|
||||||
if self.args.annotate:
|
if self.args.annotate:
|
||||||
printf("# path: %s\n", self.args.path)
|
printf("# path: %s\n", self.args.path)
|
||||||
|
@ -70,23 +55,15 @@ def cmd_extract(self):
|
||||||
printf("# end: %s\n", time_string(self.args.end))
|
printf("# end: %s\n", time_string(self.args.end))
|
||||||
|
|
||||||
printed = False
|
printed = False
|
||||||
if self.args.binary:
|
|
||||||
printer = sys.stdout.buffer.write
|
|
||||||
else:
|
|
||||||
printer = lambda x: print(x.decode('utf-8'))
|
|
||||||
bare = self.args.bare
|
|
||||||
count = self.args.count
|
|
||||||
for dataline in self.client.stream_extract(self.args.path,
|
for dataline in self.client.stream_extract(self.args.path,
|
||||||
self.args.start,
|
self.args.start,
|
||||||
self.args.end,
|
self.args.end,
|
||||||
self.args.count,
|
self.args.count):
|
||||||
self.args.markup,
|
if self.args.bare and not self.args.count:
|
||||||
self.args.binary):
|
|
||||||
if bare and not count:
|
|
||||||
# Strip timestamp (first element). Doesn't make sense
|
# Strip timestamp (first element). Doesn't make sense
|
||||||
# if we are only returning a count.
|
# if we are only returning a count.
|
||||||
dataline = b' '.join(dataline.split(b' ')[1:])
|
dataline = ' '.join(dataline.split(' ')[1:])
|
||||||
printer(dataline)
|
print(dataline)
|
||||||
printed = True
|
printed = True
|
||||||
if not printed:
|
if not printed:
|
||||||
if self.args.annotate:
|
if self.args.annotate:
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
import argparse
|
from nilmdb.utils.printf import *
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("help", help="Show detailed help for a command",
|
cmd = sub.add_parser("help", help="Show detailed help for a command",
|
||||||
|
@ -15,7 +17,6 @@ def setup(self, sub):
|
||||||
help=argparse.SUPPRESS)
|
help=argparse.SUPPRESS)
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_help(self):
|
def cmd_help(self):
|
||||||
if self.args.command in self.subcmd:
|
if self.args.command in self.subcmd:
|
||||||
self.subcmd[self.args.command].print_help()
|
self.subcmd[self.args.command].print_help()
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
import nilmdb
|
||||||
|
from nilmdb.utils.printf import *
|
||||||
import nilmdb.client
|
|
||||||
from nilmdb.utils.printf import printf
|
|
||||||
from nilmdb.utils import human_size
|
from nilmdb.utils import human_size
|
||||||
|
|
||||||
|
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("info", help="Server information",
|
cmd = sub.add_parser("info", help="Server information",
|
||||||
|
@ -15,7 +14,6 @@ def setup(self, sub):
|
||||||
cmd.set_defaults(handler = cmd_info)
|
cmd.set_defaults(handler = cmd_info)
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_info(self):
|
def cmd_info(self):
|
||||||
"""Print info about the server"""
|
"""Print info about the server"""
|
||||||
printf("Client version: %s\n", nilmdb.__version__)
|
printf("Client version: %s\n", nilmdb.__version__)
|
||||||
|
@ -23,8 +21,5 @@ def cmd_info(self):
|
||||||
printf("Server URL: %s\n", self.client.geturl())
|
printf("Server URL: %s\n", self.client.geturl())
|
||||||
dbinfo = self.client.dbinfo()
|
dbinfo = self.client.dbinfo()
|
||||||
printf("Server database path: %s\n", dbinfo["path"])
|
printf("Server database path: %s\n", dbinfo["path"])
|
||||||
for (desc, field) in [("used by NilmDB", "size"),
|
printf("Server database size: %s\n", human_size(dbinfo["size"]))
|
||||||
("used by other", "other"),
|
printf("Server database free space: %s\n", human_size(dbinfo["free"]))
|
||||||
("reserved", "reserved"),
|
|
||||||
("free", "free")]:
|
|
||||||
printf("Server disk space %s: %s\n", desc, human_size(dbinfo[field]))
|
|
||||||
|
|
|
@ -1,129 +1,99 @@
|
||||||
import sys
|
from nilmdb.utils.printf import *
|
||||||
|
import nilmdb
|
||||||
from nilmdb.utils.printf import printf
|
|
||||||
import nilmdb.client
|
import nilmdb.client
|
||||||
import nilmdb.utils.timestamper as timestamper
|
import nilmdb.utils.timestamper as timestamper
|
||||||
import nilmdb.utils.time
|
import nilmdb.utils.time
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("insert", help="Insert data",
|
cmd = sub.add_parser("insert", help="Insert data",
|
||||||
description="""
|
description="""
|
||||||
Insert data into a stream.
|
Insert data into a stream.
|
||||||
""")
|
""")
|
||||||
cmd.set_defaults(verify=cmd_insert_verify,
|
cmd.set_defaults(handler = cmd_insert)
|
||||||
handler=cmd_insert)
|
|
||||||
cmd.add_argument("-q", "--quiet", action='store_true',
|
cmd.add_argument("-q", "--quiet", action='store_true',
|
||||||
help='suppress unnecessary messages')
|
help='suppress unnecessary messages')
|
||||||
|
|
||||||
group = cmd.add_argument_group("Timestamping",
|
group = cmd.add_argument_group("Timestamping",
|
||||||
description="""
|
description="""
|
||||||
To add timestamps, specify the
|
If timestamps are already provided in the
|
||||||
arguments --timestamp and --rate,
|
input date, use --none. Otherwise,
|
||||||
and provide a starting time.
|
provide --start, or use --filename to
|
||||||
|
try to deduce timestamps from the file.
|
||||||
|
|
||||||
|
Set the TZ environment variable to change
|
||||||
|
the default timezone.
|
||||||
""")
|
""")
|
||||||
|
|
||||||
group.add_argument("-t", "--timestamp", action="store_true",
|
|
||||||
help="Add timestamps to each line")
|
|
||||||
group.add_argument("-r", "--rate", type=float,
|
group.add_argument("-r", "--rate", type=float,
|
||||||
help="Data rate, in Hz",
|
help="""
|
||||||
).completer = self.complete.rate
|
If needed, rate in Hz (required when using --start)
|
||||||
|
""")
|
||||||
group = cmd.add_argument_group("Start time",
|
|
||||||
description="""
|
|
||||||
Start time may be manually
|
|
||||||
specified with --start, or guessed
|
|
||||||
from the filenames using
|
|
||||||
--filename. Set the TZ environment
|
|
||||||
variable to change the default
|
|
||||||
timezone.""")
|
|
||||||
|
|
||||||
exc = group.add_mutually_exclusive_group()
|
exc = group.add_mutually_exclusive_group()
|
||||||
exc.add_argument("-s", "--start",
|
exc.add_argument("-s", "--start",
|
||||||
metavar="TIME", type=self.arg_time,
|
metavar="TIME", type=self.arg_time,
|
||||||
help="Starting timestamp (free-form)",
|
help="Starting timestamp (free-form)")
|
||||||
).completer = self.complete.time
|
|
||||||
exc.add_argument("-f", "--filename", action="store_true",
|
exc.add_argument("-f", "--filename", action="store_true",
|
||||||
help="Use filename to determine start time")
|
help="""
|
||||||
|
Use filenames to determine start time
|
||||||
group = cmd.add_argument_group("End time",
|
(default, if filenames are provided)
|
||||||
description="""
|
""")
|
||||||
End time for the overall stream.
|
exc.add_argument("-n", "--none", action="store_true",
|
||||||
(required when not using --timestamp).
|
help="Timestamp is already present, don't add one")
|
||||||
Set the TZ environment
|
|
||||||
variable to change the default
|
|
||||||
timezone.""")
|
|
||||||
group.add_argument("-e", "--end",
|
|
||||||
metavar="TIME", type=self.arg_time,
|
|
||||||
help="Ending timestamp (free-form)",
|
|
||||||
).completer = self.complete.time
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Required parameters")
|
group = cmd.add_argument_group("Required parameters")
|
||||||
group.add_argument("path",
|
group.add_argument("path",
|
||||||
help="Path of stream, e.g. /foo/bar",
|
help="Path of stream, e.g. /foo/bar")
|
||||||
).completer = self.complete.path
|
group.add_argument("file", nargs="*", default=['-'],
|
||||||
group.add_argument("file", nargs='?', default='-',
|
help="File(s) to insert (default: - (stdin))")
|
||||||
help="File to insert (default: - (stdin))")
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_insert_verify(self):
|
|
||||||
if self.args.timestamp:
|
|
||||||
if not self.args.rate:
|
|
||||||
self.die("error: --rate is needed, but was not specified")
|
|
||||||
if not self.args.filename and self.args.start is None:
|
|
||||||
self.die("error: need --start or --filename "
|
|
||||||
"when adding timestamps")
|
|
||||||
else:
|
|
||||||
if self.args.start is None or self.args.end is None:
|
|
||||||
self.die("error: when not adding timestamps, --start and "
|
|
||||||
"--end are required")
|
|
||||||
|
|
||||||
|
|
||||||
def cmd_insert(self):
|
def cmd_insert(self):
|
||||||
# Find requested stream
|
# Find requested stream
|
||||||
streams = self.client.stream_list(self.args.path)
|
streams = self.client.stream_list(self.args.path)
|
||||||
if len(streams) != 1:
|
if len(streams) != 1:
|
||||||
self.die("error getting stream info for path %s", self.args.path)
|
self.die("error getting stream info for path %s", self.args.path)
|
||||||
|
|
||||||
arg = self.args
|
if self.args.start and len(self.args.file) != 1:
|
||||||
|
self.die("error: --start can only be used with one input file")
|
||||||
|
|
||||||
try:
|
for filename in self.args.file:
|
||||||
filename = arg.file
|
|
||||||
if filename == '-':
|
if filename == '-':
|
||||||
infile = sys.stdin.buffer
|
infile = sys.stdin
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
infile = open(filename, "rb")
|
infile = open(filename, "r")
|
||||||
except IOError:
|
except IOError:
|
||||||
self.die("error opening input file %s", filename)
|
self.die("error opening input file %s", filename)
|
||||||
|
|
||||||
if arg.start is None:
|
# Build a timestamper for this file
|
||||||
|
if self.args.none:
|
||||||
|
ts = timestamper.TimestamperNull(infile)
|
||||||
|
else:
|
||||||
|
if self.args.start:
|
||||||
|
start = self.args.start
|
||||||
|
else:
|
||||||
try:
|
try:
|
||||||
arg.start = nilmdb.utils.time.parse_time(filename)
|
start = nilmdb.utils.time.parse_time(filename)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
self.die("error extracting start time from filename '%s'",
|
self.die("error extracting time from filename '%s'",
|
||||||
filename)
|
filename)
|
||||||
|
|
||||||
if arg.timestamp:
|
if not self.args.rate:
|
||||||
data = timestamper.TimestamperRate(infile, arg.start, arg.rate)
|
self.die("error: --rate is needed, but was not specified")
|
||||||
else:
|
rate = self.args.rate
|
||||||
data = iter(lambda: infile.read(1048576), b'')
|
|
||||||
|
ts = timestamper.TimestamperRate(infile, start, rate)
|
||||||
|
|
||||||
# Print info
|
# Print info
|
||||||
if not arg.quiet:
|
if not self.args.quiet:
|
||||||
printf("Input file: %s\n", filename)
|
printf("Input file: %s\n", filename)
|
||||||
printf(" Start time: %s\n",
|
printf("Timestamper: %s\n", str(ts))
|
||||||
nilmdb.utils.time.timestamp_to_human(arg.start))
|
|
||||||
if arg.end:
|
|
||||||
printf(" End time: %s\n",
|
|
||||||
nilmdb.utils.time.timestamp_to_human(arg.end))
|
|
||||||
if arg.timestamp:
|
|
||||||
printf("Timestamper: %s\n", str(data))
|
|
||||||
|
|
||||||
# Insert the data
|
# Insert the data
|
||||||
self.client.stream_insert(arg.path, data, arg.start, arg.end)
|
try:
|
||||||
|
self.client.stream_insert(self.args.path, ts)
|
||||||
except nilmdb.client.Error as e:
|
except nilmdb.client.Error as e:
|
||||||
# TODO: It would be nice to be able to offer better errors
|
# TODO: It would be nice to be able to offer better errors
|
||||||
# here, particularly in the case of overlap, which just shows
|
# here, particularly in the case of overlap, which just shows
|
||||||
|
|
|
@ -1,76 +0,0 @@
|
||||||
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
|
||||||
|
|
||||||
from nilmdb.utils.printf import printf
|
|
||||||
import nilmdb.utils.time
|
|
||||||
from nilmdb.utils.interval import Interval
|
|
||||||
|
|
||||||
|
|
||||||
def setup(self, sub):
|
|
||||||
cmd = sub.add_parser("intervals", help="List intervals",
|
|
||||||
formatter_class=def_form,
|
|
||||||
description="""
|
|
||||||
List intervals in a stream, similar to
|
|
||||||
'list --detail path'.
|
|
||||||
|
|
||||||
If '--diff diffpath' is provided, only
|
|
||||||
interval ranges that are present in 'path'
|
|
||||||
and not present in 'diffpath' are printed.
|
|
||||||
""")
|
|
||||||
cmd.set_defaults(verify=cmd_intervals_verify,
|
|
||||||
handler=cmd_intervals)
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Stream selection")
|
|
||||||
group.add_argument("path", metavar="PATH",
|
|
||||||
help="List intervals for this path",
|
|
||||||
).completer = self.complete.path
|
|
||||||
group.add_argument("-d", "--diff", metavar="PATH",
|
|
||||||
help="Subtract intervals from this path",
|
|
||||||
).completer = self.complete.path
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Interval details")
|
|
||||||
group.add_argument("-s", "--start",
|
|
||||||
metavar="TIME", type=self.arg_time,
|
|
||||||
help="Starting timestamp for intervals "
|
|
||||||
"(free-form, inclusive)",
|
|
||||||
).completer = self.complete.time
|
|
||||||
group.add_argument("-e", "--end",
|
|
||||||
metavar="TIME", type=self.arg_time,
|
|
||||||
help="Ending timestamp for intervals "
|
|
||||||
"(free-form, noninclusive)",
|
|
||||||
).completer = self.complete.time
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Misc options")
|
|
||||||
group.add_argument("-T", "--timestamp-raw", action="store_true",
|
|
||||||
help="Show raw timestamps when printing times")
|
|
||||||
group.add_argument("-o", "--optimize", action="store_true",
|
|
||||||
help="Optimize (merge adjacent) intervals")
|
|
||||||
|
|
||||||
return cmd
|
|
||||||
|
|
||||||
|
|
||||||
def cmd_intervals_verify(self):
|
|
||||||
if self.args.start is not None and self.args.end is not None:
|
|
||||||
if self.args.start >= self.args.end:
|
|
||||||
self.parser.error("start must precede end")
|
|
||||||
|
|
||||||
|
|
||||||
def cmd_intervals(self):
|
|
||||||
"""List intervals in a stream"""
|
|
||||||
if self.args.timestamp_raw:
|
|
||||||
time_string = nilmdb.utils.time.timestamp_to_string
|
|
||||||
else:
|
|
||||||
time_string = nilmdb.utils.time.timestamp_to_human
|
|
||||||
|
|
||||||
try:
|
|
||||||
intervals = (Interval(start, end) for (start, end) in
|
|
||||||
self.client.stream_intervals(self.args.path,
|
|
||||||
self.args.start,
|
|
||||||
self.args.end,
|
|
||||||
self.args.diff))
|
|
||||||
if self.args.optimize:
|
|
||||||
intervals = nilmdb.utils.interval.optimize(intervals)
|
|
||||||
for i in intervals:
|
|
||||||
printf("[ %s -> %s ]\n", time_string(i.start), time_string(i.end))
|
|
||||||
|
|
||||||
except nilmdb.client.ClientError as e:
|
|
||||||
self.die("error listing intervals: %s", str(e))
|
|
|
@ -1,105 +1,96 @@
|
||||||
import fnmatch
|
from nilmdb.utils.printf import *
|
||||||
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
|
||||||
|
|
||||||
from nilmdb.utils.printf import printf
|
|
||||||
import nilmdb.utils.time
|
import nilmdb.utils.time
|
||||||
|
|
||||||
|
import fnmatch
|
||||||
|
import argparse
|
||||||
|
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("list", help="List streams",
|
cmd = sub.add_parser("list", help="List streams",
|
||||||
formatter_class = def_form,
|
formatter_class = def_form,
|
||||||
description="""
|
description="""
|
||||||
List streams available in the database,
|
List streams available in the database,
|
||||||
optionally filtering by path. Wildcards
|
optionally filtering by layout or path. Wildcards
|
||||||
are accepted; non-matching paths or wildcards
|
are accepted.
|
||||||
are ignored.
|
|
||||||
""")
|
""")
|
||||||
cmd.set_defaults(verify = cmd_list_verify,
|
cmd.set_defaults(verify = cmd_list_verify,
|
||||||
handler = cmd_list)
|
handler = cmd_list)
|
||||||
|
|
||||||
group = cmd.add_argument_group("Stream filtering")
|
group = cmd.add_argument_group("Stream filtering")
|
||||||
group.add_argument("path", metavar="PATH", default=["*"], nargs='*',
|
group.add_argument("-p", "--path", metavar="PATH", default="*",
|
||||||
).completer = self.complete.path
|
help="Match only this path (-p can be omitted)")
|
||||||
|
group.add_argument("path_positional", default="*",
|
||||||
|
nargs="?", help=argparse.SUPPRESS)
|
||||||
|
group.add_argument("-l", "--layout", default="*",
|
||||||
|
help="Match only this stream layout")
|
||||||
|
|
||||||
group = cmd.add_argument_group("Interval info")
|
group = cmd.add_argument_group("Interval extent")
|
||||||
group.add_argument("-E", "--ext", action="store_true",
|
group.add_argument("-E", "--extent", action="store_true",
|
||||||
help="Show extended stream info, like interval "
|
help="Show min/max timestamps in this stream")
|
||||||
"extents and row count")
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Interval details")
|
group = cmd.add_argument_group("Interval details")
|
||||||
group.add_argument("-d", "--detail", action="store_true",
|
group.add_argument("-d", "--detail", action="store_true",
|
||||||
help="Show available data time intervals")
|
help="Show available data time intervals")
|
||||||
group.add_argument("-s", "--start",
|
group.add_argument("-s", "--start",
|
||||||
metavar="TIME", type=self.arg_time,
|
metavar="TIME", type=self.arg_time,
|
||||||
help="Starting timestamp for intervals "
|
help="Starting timestamp (free-form, inclusive)")
|
||||||
"(free-form, inclusive)",
|
|
||||||
).completer = self.complete.time
|
|
||||||
group.add_argument("-e", "--end",
|
group.add_argument("-e", "--end",
|
||||||
metavar="TIME", type=self.arg_time,
|
metavar="TIME", type=self.arg_time,
|
||||||
help="Ending timestamp for intervals "
|
help="Ending timestamp (free-form, noninclusive)")
|
||||||
"(free-form, noninclusive)",
|
|
||||||
).completer = self.complete.time
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Misc options")
|
group = cmd.add_argument_group("Misc options")
|
||||||
group.add_argument("-T", "--timestamp-raw", action="store_true",
|
group.add_argument("-T", "--timestamp-raw", action="store_true",
|
||||||
help="Show raw timestamps when printing times")
|
help="Show raw timestamps in time intervals or extents")
|
||||||
group.add_argument("-l", "--layout", action="store_true",
|
|
||||||
help="Show layout type next to path name")
|
|
||||||
group.add_argument("-n", "--no-decim", action="store_true",
|
|
||||||
help="Skip paths containing \"~decim-\"")
|
|
||||||
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_list_verify(self):
|
def cmd_list_verify(self):
|
||||||
|
# A hidden "path_positional" argument lets the user leave off the
|
||||||
|
# "-p" when specifying the path. Handle it here.
|
||||||
|
got_opt = self.args.path != "*"
|
||||||
|
got_pos = self.args.path_positional != "*"
|
||||||
|
if got_pos:
|
||||||
|
if got_opt:
|
||||||
|
self.parser.error("too many paths specified")
|
||||||
|
else:
|
||||||
|
self.args.path = self.args.path_positional
|
||||||
|
|
||||||
if self.args.start is not None and self.args.end is not None:
|
if self.args.start is not None and self.args.end is not None:
|
||||||
if self.args.start >= self.args.end:
|
if self.args.start >= self.args.end:
|
||||||
self.parser.error("start must precede end")
|
self.parser.error("start must precede end")
|
||||||
|
|
||||||
if self.args.start is not None or self.args.end is not None:
|
if self.args.start is not None or self.args.end is not None:
|
||||||
if not self.args.detail:
|
if not self.args.detail:
|
||||||
self.parser.error("--start and --end only make sense "
|
self.parser.error("--start and --end only make sense with --detail")
|
||||||
"with --detail")
|
|
||||||
|
|
||||||
|
|
||||||
def cmd_list(self):
|
def cmd_list(self):
|
||||||
"""List available streams"""
|
"""List available streams"""
|
||||||
streams = self.client.stream_list(extended=True)
|
streams = self.client.stream_list(extent = True)
|
||||||
|
|
||||||
if self.args.timestamp_raw:
|
if self.args.timestamp_raw:
|
||||||
time_string = nilmdb.utils.time.timestamp_to_string
|
time_string = repr
|
||||||
else:
|
else:
|
||||||
time_string = nilmdb.utils.time.timestamp_to_human
|
time_string = nilmdb.utils.time.format_time
|
||||||
|
|
||||||
for argpath in self.args.path:
|
for (path, layout, extent_min, extent_max) in streams:
|
||||||
for stream in streams:
|
if not (fnmatch.fnmatch(path, self.args.path) and
|
||||||
(path, layout, int_min, int_max, rows, time) = stream[:6]
|
fnmatch.fnmatch(layout, self.args.layout)):
|
||||||
if not fnmatch.fnmatch(path, argpath):
|
|
||||||
continue
|
|
||||||
if self.args.no_decim and "~decim-" in path:
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.args.layout:
|
|
||||||
printf("%s %s\n", path, layout)
|
printf("%s %s\n", path, layout)
|
||||||
else:
|
|
||||||
printf("%s\n", path)
|
|
||||||
|
|
||||||
if self.args.ext:
|
if self.args.extent:
|
||||||
if int_min is None or int_max is None:
|
if extent_min is None or extent_max is None:
|
||||||
printf(" interval extents: (no data)\n")
|
printf(" extent: (no data)\n")
|
||||||
else:
|
else:
|
||||||
printf(" interval extents: %s -> %s\n",
|
printf(" extent: %s -> %s\n",
|
||||||
time_string(int_min), time_string(int_max))
|
time_string(extent_min), time_string(extent_max))
|
||||||
printf(" total data: %d rows, %.6f seconds\n",
|
|
||||||
rows or 0,
|
|
||||||
nilmdb.utils.time.timestamp_to_seconds(time or 0))
|
|
||||||
|
|
||||||
if self.args.detail:
|
if self.args.detail:
|
||||||
printed = False
|
printed = False
|
||||||
for (start, end) in self.client.stream_intervals(
|
for (start, end) in self.client.stream_intervals(
|
||||||
path, self.args.start, self.args.end):
|
path, self.args.start, self.args.end):
|
||||||
printf(" [ %s -> %s ]\n",
|
printf(" [ %s -> %s ]\n", time_string(start), time_string(end))
|
||||||
time_string(start), time_string(end))
|
|
||||||
printed = True
|
printed = True
|
||||||
if not printed:
|
if not printed:
|
||||||
printf(" (no intervals)\n")
|
printf(" (no intervals)\n")
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
from nilmdb.utils.printf import printf
|
from nilmdb.utils.printf import *
|
||||||
import nilmdb
|
import nilmdb
|
||||||
import nilmdb.client
|
import nilmdb.client
|
||||||
|
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("metadata", help="Get or set stream metadata",
|
cmd = sub.add_parser("metadata", help="Get or set stream metadata",
|
||||||
description="""
|
description="""
|
||||||
|
@ -10,34 +9,25 @@ def setup(self, sub):
|
||||||
a stream.
|
a stream.
|
||||||
""",
|
""",
|
||||||
usage="%(prog)s path [-g [key ...] | "
|
usage="%(prog)s path [-g [key ...] | "
|
||||||
"-s key=value [...] | -u key=value [...]] | "
|
"-s key=value [...] | -u key=value [...]]")
|
||||||
"-d [key ...]")
|
|
||||||
cmd.set_defaults(handler = cmd_metadata)
|
cmd.set_defaults(handler = cmd_metadata)
|
||||||
|
|
||||||
group = cmd.add_argument_group("Required arguments")
|
group = cmd.add_argument_group("Required arguments")
|
||||||
group.add_argument("path",
|
group.add_argument("path",
|
||||||
help="Path of stream, e.g. /foo/bar",
|
help="Path of stream, e.g. /foo/bar")
|
||||||
).completer = self.complete.path
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Actions")
|
group = cmd.add_argument_group("Actions")
|
||||||
exc = group.add_mutually_exclusive_group()
|
exc = group.add_mutually_exclusive_group()
|
||||||
exc.add_argument("-g", "--get", nargs="*", metavar="key",
|
exc.add_argument("-g", "--get", nargs="*", metavar="key",
|
||||||
help="Get metadata for specified keys (default all)",
|
help="Get metadata for specified keys (default all)")
|
||||||
).completer = self.complete.meta_key
|
|
||||||
exc.add_argument("-s", "--set", nargs="+", metavar="key=value",
|
exc.add_argument("-s", "--set", nargs="+", metavar="key=value",
|
||||||
help="Replace all metadata with provided "
|
help="Replace all metadata with provided "
|
||||||
"key=value pairs",
|
"key=value pairs")
|
||||||
).completer = self.complete.meta_keyval
|
|
||||||
exc.add_argument("-u", "--update", nargs="+", metavar="key=value",
|
exc.add_argument("-u", "--update", nargs="+", metavar="key=value",
|
||||||
help="Update metadata using provided "
|
help="Update metadata using provided "
|
||||||
"key=value pairs",
|
"key=value pairs")
|
||||||
).completer = self.complete.meta_keyval
|
|
||||||
exc.add_argument("-d", "--delete", nargs="*", metavar="key",
|
|
||||||
help="Delete metadata for specified keys (default all)",
|
|
||||||
).completer = self.complete.meta_key
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_metadata(self):
|
def cmd_metadata(self):
|
||||||
"""Manipulate metadata"""
|
"""Manipulate metadata"""
|
||||||
if self.args.set is not None or self.args.update is not None:
|
if self.args.set is not None or self.args.update is not None:
|
||||||
|
@ -62,29 +52,15 @@ def cmd_metadata(self):
|
||||||
handler(self.args.path, data)
|
handler(self.args.path, data)
|
||||||
except nilmdb.client.ClientError as e:
|
except nilmdb.client.ClientError as e:
|
||||||
self.die("error setting/updating metadata: %s", str(e))
|
self.die("error setting/updating metadata: %s", str(e))
|
||||||
elif self.args.delete is not None:
|
|
||||||
# Delete (by setting values to empty strings)
|
|
||||||
keys = None
|
|
||||||
if self.args.delete:
|
|
||||||
keys = list(self.args.delete)
|
|
||||||
try:
|
|
||||||
data = self.client.stream_get_metadata(self.args.path, keys)
|
|
||||||
for key in data:
|
|
||||||
data[key] = ""
|
|
||||||
self.client.stream_update_metadata(self.args.path, data)
|
|
||||||
except nilmdb.client.ClientError as e:
|
|
||||||
self.die("error deleting metadata: %s", str(e))
|
|
||||||
else:
|
else:
|
||||||
# Get (or unspecified)
|
# Get (or unspecified)
|
||||||
keys = None
|
keys = self.args.get or None
|
||||||
if self.args.get:
|
|
||||||
keys = list(self.args.get)
|
|
||||||
try:
|
try:
|
||||||
data = self.client.stream_get_metadata(self.args.path, keys)
|
data = self.client.stream_get_metadata(self.args.path, keys)
|
||||||
except nilmdb.client.ClientError as e:
|
except nilmdb.client.ClientError as e:
|
||||||
self.die("error getting metadata: %s", str(e))
|
self.die("error getting metadata: %s", str(e))
|
||||||
for key, value in sorted(data.items()):
|
for key, value in sorted(data.items()):
|
||||||
# Print nonexistant keys as having empty value
|
# Omit nonexistant keys
|
||||||
if value is None:
|
if value is None:
|
||||||
value = ""
|
value = ""
|
||||||
printf("%s=%s\n", key, value)
|
printf("%s=%s\n", key, value)
|
||||||
|
|
|
@ -1,59 +1,38 @@
|
||||||
import fnmatch
|
from nilmdb.utils.printf import *
|
||||||
|
import nilmdb
|
||||||
from nilmdb.utils.printf import printf
|
|
||||||
import nilmdb.client
|
import nilmdb.client
|
||||||
|
|
||||||
|
|
||||||
def setup(self, sub):
|
def setup(self, sub):
|
||||||
cmd = sub.add_parser("remove", help="Remove data",
|
cmd = sub.add_parser("remove", help="Remove data",
|
||||||
description="""
|
description="""
|
||||||
Remove all data from a specified time range within a
|
Remove all data from a specified time range within a
|
||||||
stream. If multiple streams or wildcards are
|
stream.
|
||||||
provided, the same time range is removed from all
|
|
||||||
streams.
|
|
||||||
""")
|
""")
|
||||||
cmd.set_defaults(handler = cmd_remove)
|
cmd.set_defaults(handler = cmd_remove)
|
||||||
|
|
||||||
group = cmd.add_argument_group("Data selection")
|
group = cmd.add_argument_group("Data selection")
|
||||||
group.add_argument("path", nargs='+',
|
group.add_argument("path",
|
||||||
help="Path of stream, e.g. /foo/bar/*",
|
help="Path of stream, e.g. /foo/bar")
|
||||||
).completer = self.complete.path
|
|
||||||
group.add_argument("-s", "--start", required=True,
|
group.add_argument("-s", "--start", required=True,
|
||||||
metavar="TIME", type=self.arg_time,
|
metavar="TIME", type=self.arg_time,
|
||||||
help="Starting timestamp (free-form, inclusive)",
|
help="Starting timestamp (free-form, inclusive)")
|
||||||
).completer = self.complete.time
|
|
||||||
group.add_argument("-e", "--end", required=True,
|
group.add_argument("-e", "--end", required=True,
|
||||||
metavar="TIME", type=self.arg_time,
|
metavar="TIME", type=self.arg_time,
|
||||||
help="Ending timestamp (free-form, noninclusive)",
|
help="Ending timestamp (free-form, noninclusive)")
|
||||||
).completer = self.complete.time
|
|
||||||
|
|
||||||
group = cmd.add_argument_group("Output format")
|
group = cmd.add_argument_group("Output format")
|
||||||
group.add_argument("-q", "--quiet", action="store_true",
|
|
||||||
help="Don't display names when removing "
|
|
||||||
"from multiple paths")
|
|
||||||
group.add_argument("-c", "--count", action="store_true",
|
group.add_argument("-c", "--count", action="store_true",
|
||||||
help="Output number of data points removed")
|
help="Output number of data points removed")
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
def cmd_remove(self):
|
def cmd_remove(self):
|
||||||
streams = [s[0] for s in self.client.stream_list()]
|
|
||||||
paths = []
|
|
||||||
for path in self.args.path:
|
|
||||||
new = fnmatch.filter(streams, path)
|
|
||||||
if not new:
|
|
||||||
self.die("error: no stream matched path: %s", path)
|
|
||||||
paths.extend(new)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
for path in paths:
|
count = self.client.stream_remove(self.args.path,
|
||||||
if not self.args.quiet and len(paths) > 1:
|
|
||||||
printf("Removing from %s\n", path)
|
|
||||||
count = self.client.stream_remove(path,
|
|
||||||
self.args.start, self.args.end)
|
self.args.start, self.args.end)
|
||||||
if self.args.count:
|
|
||||||
printf("%d\n", count)
|
|
||||||
except nilmdb.client.ClientError as e:
|
except nilmdb.client.ClientError as e:
|
||||||
self.die("error removing data: %s", str(e))
|
self.die("error removing data: %s", str(e))
|
||||||
|
|
||||||
|
if self.args.count:
|
||||||
|
printf("%d\n", count)
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
|
@ -1,32 +0,0 @@
|
||||||
from argparse import ArgumentDefaultsHelpFormatter as def_form
|
|
||||||
|
|
||||||
import nilmdb.client
|
|
||||||
|
|
||||||
|
|
||||||
def setup(self, sub):
|
|
||||||
cmd = sub.add_parser("rename", help="Rename a stream",
|
|
||||||
formatter_class=def_form,
|
|
||||||
description="""
|
|
||||||
Rename a stream.
|
|
||||||
|
|
||||||
Only the stream's path is renamed; no
|
|
||||||
metadata is changed.
|
|
||||||
""")
|
|
||||||
cmd.set_defaults(handler=cmd_rename)
|
|
||||||
group = cmd.add_argument_group("Required arguments")
|
|
||||||
group.add_argument("oldpath",
|
|
||||||
help="Old path, e.g. /foo/old",
|
|
||||||
).completer = self.complete.path
|
|
||||||
group.add_argument("newpath",
|
|
||||||
help="New path, e.g. /foo/bar/new",
|
|
||||||
).completer = self.complete.path
|
|
||||||
|
|
||||||
return cmd
|
|
||||||
|
|
||||||
|
|
||||||
def cmd_rename(self):
|
|
||||||
"""Rename a stream"""
|
|
||||||
try:
|
|
||||||
self.client.stream_rename(self.args.oldpath, self.args.newpath)
|
|
||||||
except nilmdb.client.ClientError as e:
|
|
||||||
self.die("error renaming stream: %s", str(e))
|
|
|
@ -1,3 +0,0 @@
|
||||||
"""nilmdb.fsck"""
|
|
||||||
|
|
||||||
from nilmdb.fsck.fsck import Fsck
|
|
|
@ -1,610 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
"""Check database consistency, with some ability to fix problems.
|
|
||||||
This should be able to fix cases where a database gets corrupted due
|
|
||||||
to unexpected system shutdown, and detect other cases that may cause
|
|
||||||
NilmDB to return errors when trying to manipulate the database."""
|
|
||||||
|
|
||||||
import nilmdb.utils
|
|
||||||
import nilmdb.server
|
|
||||||
import nilmdb.client.numpyclient
|
|
||||||
from nilmdb.utils.interval import IntervalError
|
|
||||||
from nilmdb.server.interval import Interval, IntervalSet
|
|
||||||
from nilmdb.utils.printf import printf, fprintf, sprintf
|
|
||||||
|
|
||||||
from collections import defaultdict
|
|
||||||
import sqlite3
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import progressbar
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
import pickle
|
|
||||||
import numpy
|
|
||||||
|
|
||||||
|
|
||||||
class FsckError(Exception):
|
|
||||||
def __init__(self, msg="", *args):
|
|
||||||
if args:
|
|
||||||
msg = sprintf(msg, *args)
|
|
||||||
Exception.__init__(self, msg)
|
|
||||||
|
|
||||||
|
|
||||||
class FixableFsckError(FsckError):
|
|
||||||
def __init__(self, msg=""):
|
|
||||||
FsckError.__init__(self, f'{msg}\nThis may be fixable with "--fix".')
|
|
||||||
|
|
||||||
|
|
||||||
class RetryFsck(FsckError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class FsckFormatError(FsckError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def log(format, *args):
|
|
||||||
printf(format, *args)
|
|
||||||
|
|
||||||
|
|
||||||
def err(format, *args):
|
|
||||||
fprintf(sys.stderr, format, *args)
|
|
||||||
|
|
||||||
|
|
||||||
# Decorator that retries a function if it returns a specific value
|
|
||||||
def retry_if_raised(exc, message=None, max_retries=1000):
|
|
||||||
def f1(func):
|
|
||||||
def f2(*args, **kwargs):
|
|
||||||
for n in range(max_retries):
|
|
||||||
try:
|
|
||||||
return func(*args, **kwargs)
|
|
||||||
except exc:
|
|
||||||
if message:
|
|
||||||
log(f"{message} ({n+1})\n\n")
|
|
||||||
raise Exception("Max number of retries (%d) exceeded; giving up" %
|
|
||||||
max_retries)
|
|
||||||
return f2
|
|
||||||
return f1
|
|
||||||
|
|
||||||
|
|
||||||
class Progress(object):
|
|
||||||
def __init__(self, maxval):
|
|
||||||
if maxval == 0:
|
|
||||||
maxval = 1
|
|
||||||
self.bar = progressbar.ProgressBar(
|
|
||||||
maxval=maxval,
|
|
||||||
widgets=[progressbar.Percentage(), ' ',
|
|
||||||
progressbar.Bar(), ' ',
|
|
||||||
progressbar.ETA()])
|
|
||||||
self.bar.term_width = self.bar.term_width or 75
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
self.bar.start()
|
|
||||||
self.last_update = 0
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_value, traceback):
|
|
||||||
if exc_type is None:
|
|
||||||
self.bar.finish()
|
|
||||||
else:
|
|
||||||
printf("\n")
|
|
||||||
|
|
||||||
def update(self, val):
|
|
||||||
self.bar.update(val)
|
|
||||||
|
|
||||||
|
|
||||||
class Fsck(object):
|
|
||||||
def __init__(self, path, fix=False):
|
|
||||||
self.basepath = path
|
|
||||||
self.sqlpath = os.path.join(path, "data.sql")
|
|
||||||
self.bulkpath = os.path.join(path, "data")
|
|
||||||
self.bulklock = os.path.join(path, "data.lock")
|
|
||||||
self.fix = fix
|
|
||||||
|
|
||||||
### Main checks
|
|
||||||
|
|
||||||
@retry_if_raised(RetryFsck, "Something was fixed: restarting fsck")
|
|
||||||
def check(self, skip_data=False):
|
|
||||||
self.bulk = None
|
|
||||||
self.sql = None
|
|
||||||
try:
|
|
||||||
self.check_paths()
|
|
||||||
self.check_sql()
|
|
||||||
self.check_streams()
|
|
||||||
self.check_intervals()
|
|
||||||
if skip_data:
|
|
||||||
log("skipped data check\n")
|
|
||||||
else:
|
|
||||||
self.check_data()
|
|
||||||
finally:
|
|
||||||
if self.bulk:
|
|
||||||
self.bulk.close()
|
|
||||||
if self.sql: # pragma: no cover
|
|
||||||
# (coverage doesn't handle finally clauses correctly;
|
|
||||||
# both branches here are tested)
|
|
||||||
self.sql.commit()
|
|
||||||
self.sql.close()
|
|
||||||
log("ok\n")
|
|
||||||
|
|
||||||
### Check basic path structure
|
|
||||||
|
|
||||||
def check_paths(self):
|
|
||||||
log("checking paths\n")
|
|
||||||
if self.bulk:
|
|
||||||
self.bulk.close()
|
|
||||||
if not os.path.isfile(self.sqlpath):
|
|
||||||
raise FsckError("SQL database missing (%s)", self.sqlpath)
|
|
||||||
if not os.path.isdir(self.bulkpath):
|
|
||||||
raise FsckError("Bulk data directory missing (%s)", self.bulkpath)
|
|
||||||
with open(self.bulklock, "w") as lockfile:
|
|
||||||
if not nilmdb.utils.lock.exclusive_lock(lockfile):
|
|
||||||
raise FsckError('Database already locked by another process\n'
|
|
||||||
'Make sure all other processes that might be '
|
|
||||||
'using the database are stopped.\n'
|
|
||||||
'Restarting apache will cause it to unlock '
|
|
||||||
'the db until a request is received.')
|
|
||||||
# unlocked immediately
|
|
||||||
self.bulk = nilmdb.server.bulkdata.BulkData(self.basepath)
|
|
||||||
|
|
||||||
### Check SQL database health
|
|
||||||
|
|
||||||
def check_sql(self):
|
|
||||||
log("checking sqlite database\n")
|
|
||||||
|
|
||||||
self.sql = sqlite3.connect(self.sqlpath)
|
|
||||||
with self.sql:
|
|
||||||
cur = self.sql.cursor()
|
|
||||||
ver = cur.execute("PRAGMA user_version").fetchone()[0]
|
|
||||||
good = max(nilmdb.server.nilmdb._sql_schema_updates.keys())
|
|
||||||
if ver != good:
|
|
||||||
raise FsckError("database version %d too old, should be %d",
|
|
||||||
ver, good)
|
|
||||||
self.stream_path = {}
|
|
||||||
self.stream_layout = {}
|
|
||||||
log(" loading paths\n")
|
|
||||||
result = cur.execute("SELECT id, path, layout FROM streams")
|
|
||||||
for r in result:
|
|
||||||
if r[0] in self.stream_path:
|
|
||||||
raise FsckError("duplicated ID %d in stream IDs", r[0])
|
|
||||||
self.stream_path[r[0]] = r[1]
|
|
||||||
self.stream_layout[r[0]] = r[2]
|
|
||||||
|
|
||||||
log(" loading intervals\n")
|
|
||||||
self.stream_interval = defaultdict(list)
|
|
||||||
result = cur.execute("SELECT stream_id, start_time, end_time, "
|
|
||||||
"start_pos, end_pos FROM ranges "
|
|
||||||
"ORDER BY start_time")
|
|
||||||
for r in result:
|
|
||||||
if r[0] not in self.stream_path:
|
|
||||||
raise FsckError("interval ID %d not in streams", r[0])
|
|
||||||
self.stream_interval[r[0]].append((r[1], r[2], r[3], r[4]))
|
|
||||||
|
|
||||||
log(" loading metadata\n")
|
|
||||||
self.stream_meta = defaultdict(dict)
|
|
||||||
result = cur.execute("SELECT stream_id, key, value FROM metadata")
|
|
||||||
for r in result:
|
|
||||||
if r[0] not in self.stream_path:
|
|
||||||
raise FsckError("metadata ID %d not in streams", r[0])
|
|
||||||
if r[1] in self.stream_meta[r[0]]:
|
|
||||||
raise FsckError(
|
|
||||||
"duplicate metadata key '%s' for stream %d",
|
|
||||||
r[1], r[0])
|
|
||||||
self.stream_meta[r[0]][r[1]] = r[2]
|
|
||||||
|
|
||||||
### Check streams and basic interval overlap
|
|
||||||
|
|
||||||
def check_streams(self):
|
|
||||||
ids = list(self.stream_path.keys())
|
|
||||||
log("checking %s streams\n", "{:,d}".format(len(ids)))
|
|
||||||
with Progress(len(ids)) as pbar:
|
|
||||||
for i, sid in enumerate(ids):
|
|
||||||
pbar.update(i)
|
|
||||||
path = self.stream_path[sid]
|
|
||||||
|
|
||||||
# unique path, valid layout
|
|
||||||
if list(self.stream_path.values()).count(path) != 1:
|
|
||||||
raise FsckError("duplicated path %s", path)
|
|
||||||
layout = self.stream_layout[sid].split('_')[0]
|
|
||||||
if layout not in ('int8', 'int16', 'int32', 'int64',
|
|
||||||
'uint8', 'uint16', 'uint32', 'uint64',
|
|
||||||
'float32', 'float64'):
|
|
||||||
raise FsckError("bad layout %s for %s", layout, path)
|
|
||||||
count = int(self.stream_layout[sid].split('_')[1])
|
|
||||||
if count < 1 or count > 1024:
|
|
||||||
raise FsckError("bad count %d for %s", count, path)
|
|
||||||
|
|
||||||
# must exist in bulkdata
|
|
||||||
bulk = self.bulkpath + path
|
|
||||||
bulk = bulk.encode('utf-8')
|
|
||||||
if not os.path.isdir(bulk):
|
|
||||||
raise FsckError("%s: missing bulkdata dir", path)
|
|
||||||
if not nilmdb.server.bulkdata.Table.exists(bulk):
|
|
||||||
raise FsckError("%s: bad bulkdata table", path)
|
|
||||||
|
|
||||||
# intervals don't overlap. Abuse IntervalSet to check
|
|
||||||
# for intervals in file positions, too.
|
|
||||||
timeiset = IntervalSet()
|
|
||||||
posiset = IntervalSet()
|
|
||||||
for (stime, etime, spos, epos) in self.stream_interval[sid]:
|
|
||||||
new = Interval(stime, etime)
|
|
||||||
try:
|
|
||||||
timeiset += new
|
|
||||||
except IntervalError:
|
|
||||||
raise FsckError("%s: overlap in intervals:\n"
|
|
||||||
"set: %s\nnew: %s",
|
|
||||||
path, str(timeiset), str(new))
|
|
||||||
if spos != epos:
|
|
||||||
new = Interval(spos, epos)
|
|
||||||
try:
|
|
||||||
posiset += new
|
|
||||||
except IntervalError:
|
|
||||||
self.fix_row_overlap(sid, path, posiset, new)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Check bulkdata
|
|
||||||
self.check_bulkdata(sid, path, bulk)
|
|
||||||
|
|
||||||
# Check that we can open bulkdata
|
|
||||||
tab = nilmdb.server.bulkdata.Table(bulk)
|
|
||||||
except FsckFormatError:
|
|
||||||
# If there are no files except _format, try deleting
|
|
||||||
# the entire stream; this may remove metadata, but
|
|
||||||
# it's probably unimportant.
|
|
||||||
files = list(os.listdir(bulk))
|
|
||||||
if len(files) > 1:
|
|
||||||
raise FsckFormatError(f"{path}: can't load _format, "
|
|
||||||
f"but data is also present")
|
|
||||||
|
|
||||||
# Since the stream was empty, just remove it
|
|
||||||
self.fix_remove_stream(sid, path, bulk,
|
|
||||||
"empty, with corrupted format file")
|
|
||||||
except FsckError as e:
|
|
||||||
raise e
|
|
||||||
except Exception as e: # pragma: no cover
|
|
||||||
# No coverage because this is an unknown/unexpected error
|
|
||||||
raise FsckError("%s: can't open bulkdata: %s",
|
|
||||||
path, str(e))
|
|
||||||
tab.close()
|
|
||||||
|
|
||||||
def fix_row_overlap(self, sid, path, existing, new):
|
|
||||||
# If the file rows (spos, epos) overlap in the interval table,
|
|
||||||
# and the overlapping ranges look like this:
|
|
||||||
# A --------- C
|
|
||||||
# B -------- D
|
|
||||||
# Then we can try changing the first interval to go from
|
|
||||||
# A to B instead.
|
|
||||||
msg = (f"{path}: overlap in file offsets:\n"
|
|
||||||
f"existing ranges: {existing}\n"
|
|
||||||
f"overlapping interval: {new}")
|
|
||||||
if not self.fix:
|
|
||||||
raise FixableFsckError(msg)
|
|
||||||
err(f"\n{msg}\nSeeing if we can truncate one of them...\n")
|
|
||||||
|
|
||||||
# See if there'e exactly one interval that overlaps the
|
|
||||||
# conflicting one in the right way
|
|
||||||
match = None
|
|
||||||
for intv in self.stream_interval[sid]:
|
|
||||||
(stime, etime, spos, epos) = intv
|
|
||||||
if spos < new.start and epos > new.start:
|
|
||||||
if match:
|
|
||||||
err(f"no, more than one interval matched:\n"
|
|
||||||
f"{intv}\n{match}\n")
|
|
||||||
raise FsckError(f"{path}: unfixable overlap")
|
|
||||||
match = intv
|
|
||||||
if match is None:
|
|
||||||
err("no intervals overlapped in the right way\n")
|
|
||||||
raise FsckError(f"{path}: unfixable overlap")
|
|
||||||
|
|
||||||
# Truncate the file position
|
|
||||||
err(f"truncating {match}\n")
|
|
||||||
with self.sql:
|
|
||||||
cur = self.sql.cursor()
|
|
||||||
cur.execute("UPDATE ranges SET end_pos=? "
|
|
||||||
"WHERE stream_id=? AND start_time=? AND "
|
|
||||||
"end_time=? AND start_pos=? AND end_pos=?",
|
|
||||||
(new.start, sid, *match))
|
|
||||||
if cur.rowcount != 1: # pragma: no cover (shouldn't fail)
|
|
||||||
raise FsckError("failed to fix SQL database")
|
|
||||||
raise RetryFsck
|
|
||||||
|
|
||||||
### Check that bulkdata is good enough to be opened
|
|
||||||
|
|
||||||
@retry_if_raised(RetryFsck)
|
|
||||||
def check_bulkdata(self, sid, path, bulk):
|
|
||||||
try:
|
|
||||||
with open(os.path.join(bulk, b"_format"), "rb") as f:
|
|
||||||
fmt = pickle.load(f)
|
|
||||||
except Exception as e:
|
|
||||||
raise FsckFormatError(f"{path}: can't load _format file ({e})")
|
|
||||||
|
|
||||||
if fmt["version"] != 3:
|
|
||||||
raise FsckFormatError("%s: bad or unsupported bulkdata version %d",
|
|
||||||
path, fmt["version"])
|
|
||||||
rows_per_file = int(fmt["rows_per_file"])
|
|
||||||
if rows_per_file < 1:
|
|
||||||
raise FsckFormatError(f"{path}: bad rows_per_file {rows_per_file}")
|
|
||||||
files_per_dir = int(fmt["files_per_dir"])
|
|
||||||
if files_per_dir < 1:
|
|
||||||
raise FsckFormatError(f"{path}: bad files_per_dir {files_per_dir}")
|
|
||||||
layout = fmt["layout"]
|
|
||||||
if layout != self.stream_layout[sid]:
|
|
||||||
raise FsckFormatError("%s: layout mismatch %s != %s", path,
|
|
||||||
layout, self.stream_layout[sid])
|
|
||||||
|
|
||||||
# Every file should have a size that's the multiple of the row size
|
|
||||||
rkt = nilmdb.server.rocket.Rocket(layout, None)
|
|
||||||
row_size = rkt.binary_size
|
|
||||||
rkt.close()
|
|
||||||
|
|
||||||
# Find all directories
|
|
||||||
regex = re.compile(b"^[0-9a-f]{4,}$")
|
|
||||||
subdirs = sorted(filter(regex.search, os.listdir(bulk)),
|
|
||||||
key=lambda x: int(x, 16), reverse=True)
|
|
||||||
for subdir in subdirs:
|
|
||||||
# Find all files in that dir
|
|
||||||
subpath = os.path.join(bulk, subdir)
|
|
||||||
files = list(filter(regex.search, os.listdir(subpath)))
|
|
||||||
if not files:
|
|
||||||
self.fix_empty_subdir(subpath)
|
|
||||||
|
|
||||||
# Verify that their size is a multiple of the row size
|
|
||||||
for filename in files:
|
|
||||||
filepath = os.path.join(subpath, filename)
|
|
||||||
offset = os.path.getsize(filepath)
|
|
||||||
if offset % row_size:
|
|
||||||
self.fix_bad_filesize(path, filepath, offset, row_size)
|
|
||||||
|
|
||||||
def fix_empty_subdir(self, subpath):
|
|
||||||
msg = sprintf("bulkdata path %s is missing data files", subpath)
|
|
||||||
if not self.fix:
|
|
||||||
raise FixableFsckError(msg)
|
|
||||||
# Try to fix it by just deleting whatever is present,
|
|
||||||
# as long as it's only ".removed" files.
|
|
||||||
err("\n%s\n", msg)
|
|
||||||
for fn in os.listdir(subpath):
|
|
||||||
if not fn.endswith(b".removed"):
|
|
||||||
raise FsckError("can't fix automatically: please manually "
|
|
||||||
"remove the file '%s' and try again",
|
|
||||||
os.path.join(subpath, fn).decode(
|
|
||||||
'utf-8', errors='backslashreplace'))
|
|
||||||
# Remove the whole thing
|
|
||||||
err("Removing empty subpath\n")
|
|
||||||
shutil.rmtree(subpath)
|
|
||||||
raise RetryFsck
|
|
||||||
|
|
||||||
def fix_bad_filesize(self, path, filepath, offset, row_size):
|
|
||||||
extra = offset % row_size
|
|
||||||
msg = sprintf("%s: size of file %s (%d) is not a multiple" +
|
|
||||||
" of row size (%d): %d extra bytes present",
|
|
||||||
path, filepath, offset, row_size, extra)
|
|
||||||
if not self.fix:
|
|
||||||
raise FixableFsckError(msg)
|
|
||||||
# Try to fix it by just truncating the file
|
|
||||||
err("\n%s\n", msg)
|
|
||||||
newsize = offset - extra
|
|
||||||
err("Truncating file to %d bytes and retrying\n", newsize)
|
|
||||||
with open(filepath, "r+b") as f:
|
|
||||||
f.truncate(newsize)
|
|
||||||
raise RetryFsck
|
|
||||||
|
|
||||||
def fix_remove_stream(self, sid, path, bulk, reason):
|
|
||||||
msg = f"stream {path} is corrupted: {reason}"
|
|
||||||
if not self.fix:
|
|
||||||
raise FixableFsckError(msg)
|
|
||||||
# Remove the stream from disk and the database
|
|
||||||
err(f"\n{msg}\n")
|
|
||||||
err(f"Removing stream {path} from disk and database\n")
|
|
||||||
shutil.rmtree(bulk)
|
|
||||||
with self.sql:
|
|
||||||
cur = self.sql.cursor()
|
|
||||||
cur.execute("DELETE FROM streams WHERE id=?",
|
|
||||||
(sid,))
|
|
||||||
if cur.rowcount != 1: # pragma: no cover (shouldn't fail)
|
|
||||||
raise FsckError("failed to remove stream")
|
|
||||||
cur.execute("DELETE FROM ranges WHERE stream_id=?", (sid,))
|
|
||||||
cur.execute("DELETE FROM metadata WHERE stream_id=?", (sid,))
|
|
||||||
raise RetryFsck
|
|
||||||
|
|
||||||
### Check interval endpoints
|
|
||||||
|
|
||||||
def check_intervals(self):
|
|
||||||
total_ints = sum(len(x) for x in list(self.stream_interval.values()))
|
|
||||||
log("checking %s intervals\n", "{:,d}".format(total_ints))
|
|
||||||
done = 0
|
|
||||||
with Progress(total_ints) as pbar:
|
|
||||||
for sid in self.stream_interval:
|
|
||||||
try:
|
|
||||||
bulk = self.bulkpath + self.stream_path[sid]
|
|
||||||
bulk = bulk.encode('utf-8')
|
|
||||||
tab = nilmdb.server.bulkdata.Table(bulk)
|
|
||||||
|
|
||||||
def update(x):
|
|
||||||
pbar.update(done + x)
|
|
||||||
|
|
||||||
ints = self.stream_interval[sid]
|
|
||||||
done += self.check_table_intervals(sid, ints, tab, update)
|
|
||||||
finally:
|
|
||||||
tab.close()
|
|
||||||
|
|
||||||
def check_table_intervals(self, sid, ints, tab, update):
|
|
||||||
# look in the table to make sure we can pick out the interval's
|
|
||||||
# endpoints
|
|
||||||
path = self.stream_path[sid] # noqa: F841 unused
|
|
||||||
tab.file_open.cache_remove_all()
|
|
||||||
for (i, intv) in enumerate(ints):
|
|
||||||
update(i)
|
|
||||||
(stime, etime, spos, epos) = intv
|
|
||||||
if spos == epos and spos >= 0 and spos <= tab.nrows:
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
srow = tab[spos] # noqa: F841 unused
|
|
||||||
erow = tab[epos-1] # noqa: F841 unused
|
|
||||||
except Exception as e:
|
|
||||||
self.fix_bad_interval(sid, intv, tab, str(e))
|
|
||||||
|
|
||||||
return len(ints)
|
|
||||||
|
|
||||||
def fix_bad_interval(self, sid, intv, tab, msg):
|
|
||||||
path = self.stream_path[sid]
|
|
||||||
msg = sprintf("%s: interval %s error accessing rows: %s",
|
|
||||||
path, str(intv), str(msg))
|
|
||||||
if not self.fix:
|
|
||||||
raise FixableFsckError(msg)
|
|
||||||
err("\n%s\n", msg)
|
|
||||||
|
|
||||||
(stime, etime, spos, epos) = intv
|
|
||||||
# If it's just that the end pos is more than the number of rows
|
|
||||||
# in the table, lower end pos and truncate interval time too.
|
|
||||||
if spos < tab.nrows and epos >= tab.nrows:
|
|
||||||
err("end position is past endrows, but it can be truncated\n")
|
|
||||||
err("old end: time %d, pos %d\n", etime, epos)
|
|
||||||
new_epos = tab.nrows
|
|
||||||
new_etime = tab[new_epos-1] + 1
|
|
||||||
err("new end: time %d, pos %d\n", new_etime, new_epos)
|
|
||||||
if stime < new_etime:
|
|
||||||
# Change it in SQL
|
|
||||||
with self.sql:
|
|
||||||
cur = self.sql.cursor()
|
|
||||||
cur.execute("UPDATE ranges SET end_time=?, end_pos=? "
|
|
||||||
"WHERE stream_id=? AND start_time=? AND "
|
|
||||||
"end_time=? AND start_pos=? AND end_pos=?",
|
|
||||||
(new_etime, new_epos, sid, stime, etime,
|
|
||||||
spos, epos))
|
|
||||||
if cur.rowcount != 1: # pragma: no cover (shouldn't fail)
|
|
||||||
raise FsckError("failed to fix SQL database")
|
|
||||||
raise RetryFsck
|
|
||||||
err("actually it can't be truncated; times are bad too\n")
|
|
||||||
|
|
||||||
# Otherwise, the only hope is to delete the interval entirely.
|
|
||||||
err("*** Deleting the entire interval from SQL.\n")
|
|
||||||
err("This may leave stale data on disk. To fix that, copy all "
|
|
||||||
"data from this stream to a new stream using nilm-copy, then\n")
|
|
||||||
err("remove all data from and destroy %s.\n", path)
|
|
||||||
with self.sql:
|
|
||||||
cur = self.sql.cursor()
|
|
||||||
cur.execute("DELETE FROM ranges WHERE "
|
|
||||||
"stream_id=? AND start_time=? AND "
|
|
||||||
"end_time=? AND start_pos=? AND end_pos=?",
|
|
||||||
(sid, stime, etime, spos, epos))
|
|
||||||
if cur.rowcount != 1: # pragma: no cover (shouldn't fail)
|
|
||||||
raise FsckError("failed to remove interval")
|
|
||||||
raise RetryFsck
|
|
||||||
|
|
||||||
### Check data in each interval
|
|
||||||
|
|
||||||
def check_data(self):
|
|
||||||
total_rows = sum(sum((y[3] - y[2]) for y in x)
|
|
||||||
for x in list(self.stream_interval.values()))
|
|
||||||
log("checking %s rows of data\n", "{:,d}".format(total_rows))
|
|
||||||
done = 0
|
|
||||||
with Progress(total_rows) as pbar:
|
|
||||||
for sid in self.stream_interval:
|
|
||||||
try:
|
|
||||||
bulk = self.bulkpath + self.stream_path[sid]
|
|
||||||
bulk = bulk.encode('utf-8')
|
|
||||||
tab = nilmdb.server.bulkdata.Table(bulk)
|
|
||||||
|
|
||||||
def update(x):
|
|
||||||
pbar.update(done + x)
|
|
||||||
|
|
||||||
ints = self.stream_interval[sid]
|
|
||||||
done += self.check_table_data(sid, ints, tab, update)
|
|
||||||
finally:
|
|
||||||
tab.close()
|
|
||||||
|
|
||||||
def check_table_data(self, sid, ints, tab, update):
|
|
||||||
# Pull out all of the interval's data and verify that it's
|
|
||||||
# monotonic.
|
|
||||||
maxrows = getattr(self, 'maxrows_override', 100000)
|
|
||||||
path = self.stream_path[sid]
|
|
||||||
layout = self.stream_layout[sid]
|
|
||||||
dtype = nilmdb.client.numpyclient.layout_to_dtype(layout)
|
|
||||||
tab.file_open.cache_remove_all()
|
|
||||||
done = 0
|
|
||||||
for intv in ints:
|
|
||||||
last_ts = None
|
|
||||||
(stime, etime, spos, epos) = intv
|
|
||||||
|
|
||||||
# Break interval into maxrows-sized chunks
|
|
||||||
next_start = spos
|
|
||||||
while next_start < epos:
|
|
||||||
start = next_start
|
|
||||||
stop = min(start + maxrows, epos)
|
|
||||||
count = stop - start
|
|
||||||
next_start = stop
|
|
||||||
|
|
||||||
# Get raw data, convert to NumPy arary
|
|
||||||
try:
|
|
||||||
raw = tab.get_data(start, stop, binary=True)
|
|
||||||
data = numpy.frombuffer(raw, dtype)
|
|
||||||
except Exception as e: # pragma: no cover
|
|
||||||
# No coverage because it's hard to trigger this -- earlier
|
|
||||||
# checks check the ranges, so this would probably be a real
|
|
||||||
# disk error, malloc failure, etc.
|
|
||||||
raise FsckError(
|
|
||||||
"%s: failed to grab rows %d through %d: %s",
|
|
||||||
path, start, stop, repr(e))
|
|
||||||
|
|
||||||
ts = data['timestamp']
|
|
||||||
|
|
||||||
# Verify that all timestamps are in range.
|
|
||||||
match = (ts < stime) | (ts >= etime)
|
|
||||||
if match.any():
|
|
||||||
row = numpy.argmax(match)
|
|
||||||
if ts[row] != 0:
|
|
||||||
raise FsckError("%s: data timestamp %d at row %d "
|
|
||||||
"outside interval range [%d,%d)",
|
|
||||||
path, ts[row], row + start,
|
|
||||||
stime, etime)
|
|
||||||
|
|
||||||
# Timestamp is zero and out of the expected range;
|
|
||||||
# assume file ends with zeroed data and just truncate it.
|
|
||||||
self.fix_table_by_truncating(
|
|
||||||
path, tab, row + start,
|
|
||||||
"data timestamp is out of range, and zero")
|
|
||||||
|
|
||||||
# Verify that timestamps are monotonic
|
|
||||||
match = numpy.diff(ts) <= 0
|
|
||||||
if match.any():
|
|
||||||
row = numpy.argmax(match)
|
|
||||||
if ts[row+1] != 0:
|
|
||||||
raise FsckError(
|
|
||||||
"%s: non-monotonic timestamp (%d -> %d) "
|
|
||||||
"at row %d", path, ts[row], ts[row+1],
|
|
||||||
row + start)
|
|
||||||
|
|
||||||
# Timestamp is zero and non-monotonic;
|
|
||||||
# assume file ends with zeroed data and just truncate it.
|
|
||||||
self.fix_table_by_truncating(
|
|
||||||
path, tab, row + start + 1,
|
|
||||||
"data timestamp is non-monotonic, and zero")
|
|
||||||
|
|
||||||
first_ts = ts[0]
|
|
||||||
if last_ts is not None and first_ts <= last_ts:
|
|
||||||
raise FsckError("%s: first interval timestamp %d is not "
|
|
||||||
"greater than the previous last interval "
|
|
||||||
"timestamp %d, at row %d",
|
|
||||||
path, first_ts, last_ts, start)
|
|
||||||
last_ts = ts[-1]
|
|
||||||
|
|
||||||
# The previous errors are fixable, by removing the
|
|
||||||
# offending intervals, or changing the data
|
|
||||||
# timestamps. But these are probably unlikely errors,
|
|
||||||
# so it's not worth implementing that yet.
|
|
||||||
|
|
||||||
# Done
|
|
||||||
done += count
|
|
||||||
update(done)
|
|
||||||
return done
|
|
||||||
|
|
||||||
def fix_table_by_truncating(self, path, tab, row, reason):
|
|
||||||
# Simple fix for bad data: truncate the table at the given row.
|
|
||||||
# On retry, fix_bad_interval will correct the database and timestamps
|
|
||||||
# to account for this truncation.
|
|
||||||
msg = f"{path}: bad data in table, starting at row {row}: {reason}"
|
|
||||||
if not self.fix:
|
|
||||||
raise FixableFsckError(msg)
|
|
||||||
err(f"\n{msg}\nWill try truncating table\n")
|
|
||||||
(subdir, fname, offs, count) = tab._offset_from_row(row)
|
|
||||||
tab._remove_or_truncate_file(subdir, fname, offs)
|
|
||||||
raise RetryFsck
|
|
|
@ -1,27 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import nilmdb.fsck
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""Main entry point for the 'nilmdb-fsck' command line script"""
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description='Check database consistency',
|
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
|
||||||
parser.add_argument("-v", "--version", action="version",
|
|
||||||
version=nilmdb.__version__)
|
|
||||||
parser.add_argument("-f", "--fix", action="store_true",
|
|
||||||
default=False, help='Fix errors when possible '
|
|
||||||
'(which may involve removing data)')
|
|
||||||
parser.add_argument("-n", "--no-data", action="store_true",
|
|
||||||
default=False, help='Skip the slow full-data check')
|
|
||||||
parser.add_argument('database', help='Database directory')
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
nilmdb.fsck.Fsck(args.database, args.fix).check(skip_data=args.no_data)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
|
@ -1,14 +1,9 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/python
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import socket
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
import cherrypy
|
|
||||||
|
|
||||||
import nilmdb.server
|
import nilmdb.server
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
"""Main entry point for the 'nilmdb-server' command line script"""
|
"""Main entry point for the 'nilmdb-server' command line script"""
|
||||||
|
@ -17,7 +12,7 @@ def main():
|
||||||
description = 'Run the NilmDB server',
|
description = 'Run the NilmDB server',
|
||||||
formatter_class = argparse.ArgumentDefaultsHelpFormatter)
|
formatter_class = argparse.ArgumentDefaultsHelpFormatter)
|
||||||
|
|
||||||
parser.add_argument("-v", "--version", action="version",
|
parser.add_argument("-V", "--version", action="version",
|
||||||
version = nilmdb.__version__)
|
version = nilmdb.__version__)
|
||||||
|
|
||||||
group = parser.add_argument_group("Standard options")
|
group = parser.add_argument_group("Standard options")
|
||||||
|
@ -27,11 +22,11 @@ def main():
|
||||||
group.add_argument('-p', '--port', help = 'Listen on the given port',
|
group.add_argument('-p', '--port', help = 'Listen on the given port',
|
||||||
type = int, default = 12380)
|
type = int, default = 12380)
|
||||||
group.add_argument('-d', '--database', help = 'Database directory',
|
group.add_argument('-d', '--database', help = 'Database directory',
|
||||||
default="./db")
|
default = os.path.join(os.getcwd(), "db"))
|
||||||
group.add_argument('-q', '--quiet', help = 'Silence output',
|
group.add_argument('-q', '--quiet', help = 'Silence output',
|
||||||
action = 'store_true')
|
action = 'store_true')
|
||||||
group.add_argument('-t', '--traceback',
|
group.add_argument('-n', '--nosync', help = 'Use asynchronous '
|
||||||
help='Provide tracebacks in client errors',
|
'commits for sqlite transactions',
|
||||||
action = 'store_true', default = False)
|
action = 'store_true', default = False)
|
||||||
|
|
||||||
group = parser.add_argument_group("Debug options")
|
group = parser.add_argument_group("Debug options")
|
||||||
|
@ -43,57 +38,50 @@ def main():
|
||||||
|
|
||||||
# Create database object. Needs to be serialized before passing
|
# Create database object. Needs to be serialized before passing
|
||||||
# to the Server.
|
# to the Server.
|
||||||
db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(args.database)
|
db = nilmdb.utils.serializer_proxy(nilmdb.NilmDB)(args.database,
|
||||||
|
sync = not args.nosync)
|
||||||
|
|
||||||
# Configure the server
|
# Configure the server
|
||||||
if not args.quiet:
|
if args.quiet:
|
||||||
cherrypy._cpconfig.environments['embedded']['log.screen'] = True
|
embedded = True
|
||||||
|
else:
|
||||||
|
embedded = False
|
||||||
server = nilmdb.server.Server(db,
|
server = nilmdb.server.Server(db,
|
||||||
host = args.address,
|
host = args.address,
|
||||||
port = args.port,
|
port = args.port,
|
||||||
force_traceback=args.traceback)
|
embedded = embedded)
|
||||||
|
|
||||||
# Print info
|
# Print info
|
||||||
if not args.quiet:
|
if not args.quiet:
|
||||||
print("Version: %s" % nilmdb.__version__)
|
print "Version: %s" % nilmdb.__version__
|
||||||
print("Database: %s" % (os.path.realpath(args.database)))
|
print "Database: %s" % (os.path.realpath(args.database))
|
||||||
if args.address == '0.0.0.0' or args.address == '::':
|
if args.address == '0.0.0.0' or args.address == '::':
|
||||||
host = socket.getfqdn()
|
host = socket.getfqdn()
|
||||||
else:
|
else:
|
||||||
host = args.address
|
host = args.address
|
||||||
print("Server URL: http://%s:%d/" % (host, args.port))
|
print "Server URL: http://%s:%d/" % ( host, args.port)
|
||||||
print("----")
|
print "----"
|
||||||
|
|
||||||
# Run it
|
# Run it
|
||||||
try:
|
|
||||||
if args.yappi:
|
if args.yappi:
|
||||||
print("Running in yappi")
|
print "Running in yappi"
|
||||||
try:
|
try:
|
||||||
import yappi
|
import yappi
|
||||||
yappi.start()
|
yappi.start()
|
||||||
server.start(blocking = True)
|
server.start(blocking = True)
|
||||||
finally:
|
finally:
|
||||||
yappi.stop()
|
yappi.stop()
|
||||||
stats = yappi.get_func_stats()
|
yappi.print_stats(sort_type = yappi.SORTTYPE_TTOT, limit = 50)
|
||||||
stats.sort("ttot")
|
|
||||||
stats.print_all()
|
|
||||||
try:
|
|
||||||
from IPython import embed
|
from IPython import embed
|
||||||
embed(header="Use the `yappi` or `stats` object to "
|
embed(header = "Use the yappi object to explore further, "
|
||||||
"explore further, `quit` to exit")
|
"quit to exit")
|
||||||
except ModuleNotFoundError:
|
|
||||||
print("\nInstall ipython to explore further")
|
|
||||||
else:
|
else:
|
||||||
server.start(blocking = True)
|
server.start(blocking = True)
|
||||||
except nilmdb.server.serverutil.CherryPyExit:
|
|
||||||
print("Exiting due to CherryPy error", file=sys.stderr)
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
if not args.quiet:
|
|
||||||
print("Closing database")
|
|
||||||
db.close()
|
|
||||||
|
|
||||||
|
# Clean up
|
||||||
|
if not args.quiet:
|
||||||
|
print "Closing database"
|
||||||
|
db.close()
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|
|
@ -1,12 +1,10 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/python
|
||||||
|
|
||||||
import nilmdb.cmdline
|
import nilmdb.cmdline
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
"""Main entry point for the 'nilmtool' command line script"""
|
"""Main entry point for the 'nilmtool' command line script"""
|
||||||
nilmdb.cmdline.Cmdline().run()
|
nilmdb.cmdline.Cmdline().run()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|
|
@ -1,9 +1,22 @@
|
||||||
"""nilmdb.server"""
|
"""nilmdb.server"""
|
||||||
|
|
||||||
# Set up pyximport to automatically rebuild Cython modules if needed.
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
# Try to set up pyximport to automatically rebuild Cython modules. If
|
||||||
|
# this doesn't work, it's OK, as long as the modules were built externally.
|
||||||
|
# (e.g. python setup.py build_ext --inplace)
|
||||||
|
try: # pragma: no cover
|
||||||
|
import Cython
|
||||||
|
import distutils.version
|
||||||
|
if (distutils.version.LooseVersion(Cython.__version__) <
|
||||||
|
distutils.version.LooseVersion("0.17")): # pragma: no cover
|
||||||
|
raise ImportError("Cython version too old")
|
||||||
import pyximport
|
import pyximport
|
||||||
pyximport.install(inplace = True, build_in_temp = False)
|
pyximport.install(inplace = True, build_in_temp = False)
|
||||||
|
except (ImportError, TypeError): # pragma: no cover
|
||||||
|
pass
|
||||||
|
|
||||||
|
import nilmdb.server.layout
|
||||||
from nilmdb.server.nilmdb import NilmDB
|
from nilmdb.server.nilmdb import NilmDB
|
||||||
from nilmdb.server.server import Server, wsgi_application
|
from nilmdb.server.server import Server
|
||||||
from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
|
from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
|
||||||
|
|
|
@ -1,138 +1,67 @@
|
||||||
# Fixed record size bulk data storage
|
# Fixed record size bulk data storage
|
||||||
|
|
||||||
|
# Need absolute_import so that "import nilmdb" won't pull in
|
||||||
|
# nilmdb.py, but will pull the parent nilmdb module instead.
|
||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import division
|
||||||
|
import nilmdb
|
||||||
|
from nilmdb.utils.printf import *
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import cPickle as pickle
|
||||||
|
import struct
|
||||||
|
import mmap
|
||||||
import re
|
import re
|
||||||
import sys
|
|
||||||
import pickle
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
from nilmdb.utils.printf import sprintf
|
# If we have the faulthandler module, use it. All of the mmap stuff
|
||||||
from nilmdb.utils.time import timestamp_to_string
|
# might trigger a SIGSEGV or SIGBUS if we're not careful, and
|
||||||
import nilmdb.utils
|
# faulthandler will give a traceback in that case. (the Python
|
||||||
|
# interpreter will still die either way).
|
||||||
import nilmdb.utils.lock
|
try: # pragma: no cover
|
||||||
from . import rocket
|
import faulthandler
|
||||||
|
faulthandler.enable()
|
||||||
|
except: # pragma: no cover
|
||||||
|
pass
|
||||||
|
|
||||||
# Up to 256 open file descriptors at any given time.
|
# Up to 256 open file descriptors at any given time.
|
||||||
# These variables are global so they can be used in the decorator arguments.
|
# These variables are global so they can be used in the decorator arguments.
|
||||||
table_cache_size = 32
|
table_cache_size = 16
|
||||||
fd_cache_size = 8
|
fd_cache_size = 16
|
||||||
|
|
||||||
|
|
||||||
@nilmdb.utils.must_close(wrap_verify = False)
|
@nilmdb.utils.must_close(wrap_verify = False)
|
||||||
class BulkData():
|
class BulkData(object):
|
||||||
def __init__(self, basepath, **kwargs):
|
def __init__(self, basepath, **kwargs):
|
||||||
if isinstance(basepath, str):
|
|
||||||
self.basepath = self._encode_filename(basepath)
|
|
||||||
else:
|
|
||||||
self.basepath = basepath
|
self.basepath = basepath
|
||||||
self.root = os.path.join(self.basepath, b"data")
|
self.root = os.path.join(self.basepath, "data")
|
||||||
self.lock = self.root + b".lock"
|
|
||||||
self.lockfile = None
|
|
||||||
|
|
||||||
# Tuneables
|
# Tuneables
|
||||||
if "file_size" in kwargs and kwargs["file_size"] is not None:
|
if "file_size" in kwargs:
|
||||||
self.file_size = kwargs["file_size"]
|
self.file_size = kwargs["file_size"]
|
||||||
else:
|
else:
|
||||||
# Default to approximately 128 MiB per file
|
# Default to approximately 128 MiB per file
|
||||||
self.file_size = 128 * 1024 * 1024
|
self.file_size = 128 * 1024 * 1024
|
||||||
|
|
||||||
if "files_per_dir" in kwargs and kwargs["files_per_dir"] is not None:
|
if "files_per_dir" in kwargs:
|
||||||
self.files_per_dir = kwargs["files_per_dir"]
|
self.files_per_dir = kwargs["files_per_dir"]
|
||||||
else:
|
else:
|
||||||
# 32768 files per dir should work even on FAT32
|
# 32768 files per dir should work even on FAT32
|
||||||
self.files_per_dir = 32768
|
self.files_per_dir = 32768
|
||||||
|
|
||||||
if "initial_nrows" in kwargs and kwargs["initial_nrows"] is not None:
|
|
||||||
self.initial_nrows = kwargs["initial_nrows"]
|
|
||||||
else:
|
|
||||||
# First row is 0
|
|
||||||
self.initial_nrows = 0
|
|
||||||
|
|
||||||
# Make root path
|
# Make root path
|
||||||
if not os.path.isdir(self.root):
|
if not os.path.isdir(self.root):
|
||||||
os.mkdir(self.root)
|
os.mkdir(self.root)
|
||||||
|
|
||||||
# Create the lock
|
|
||||||
self.lockfile = open(self.lock, "w")
|
|
||||||
if not nilmdb.utils.lock.exclusive_lock(self.lockfile):
|
|
||||||
raise IOError('database at "' +
|
|
||||||
self._decode_filename(self.basepath) +
|
|
||||||
'" is already locked by another process')
|
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
self.getnode.cache_remove_all()
|
self.getnode.cache_remove_all()
|
||||||
if self.lockfile:
|
|
||||||
nilmdb.utils.lock.exclusive_unlock(self.lockfile)
|
|
||||||
self.lockfile.close()
|
|
||||||
try:
|
|
||||||
os.unlink(self.lock)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
self.lockfile = None
|
|
||||||
|
|
||||||
def _encode_filename(self, path):
|
def _encode_filename(self, path):
|
||||||
# Translate unicode strings to raw bytes, if needed. We
|
# Encode all paths to UTF-8, regardless of sys.getfilesystemencoding(),
|
||||||
# always manipulate paths internally as bytes.
|
# because we want to be able to represent all code points and the user
|
||||||
|
# will never be directly exposed to filenames. We can then do path
|
||||||
|
# manipulations on the UTF-8 directly.
|
||||||
|
if isinstance(path, unicode):
|
||||||
return path.encode('utf-8')
|
return path.encode('utf-8')
|
||||||
|
return path
|
||||||
def _decode_filename(self, path):
|
|
||||||
# Translate raw bytes to unicode strings, escaping if needed
|
|
||||||
return path.decode('utf-8', errors='backslashreplace')
|
|
||||||
|
|
||||||
def _create_check_ospath(self, ospath):
|
|
||||||
if ospath[-1:] == b'/':
|
|
||||||
raise ValueError("invalid path; should not end with a /")
|
|
||||||
if Table.exists(ospath):
|
|
||||||
raise ValueError("stream already exists at this path")
|
|
||||||
if os.path.isdir(ospath):
|
|
||||||
# Look for any files in subdirectories. Fully empty subdirectories
|
|
||||||
# are OK; they might be there during a rename
|
|
||||||
for (root, dirs, files) in os.walk(ospath):
|
|
||||||
if files:
|
|
||||||
raise ValueError(
|
|
||||||
"non-empty subdirs of this path already exist")
|
|
||||||
|
|
||||||
def _create_parents(self, unicodepath):
|
|
||||||
"""Verify the path name, and create parent directories if they
|
|
||||||
don't exist. Returns a list of elements that got created."""
|
|
||||||
path = self._encode_filename(unicodepath)
|
|
||||||
|
|
||||||
if path[0:1] != b'/':
|
|
||||||
raise ValueError("paths must start with / ")
|
|
||||||
[group, node] = path.rsplit(b"/", 1)
|
|
||||||
if group == b'':
|
|
||||||
raise ValueError("invalid path; path must contain at least one "
|
|
||||||
"folder")
|
|
||||||
if node == b'':
|
|
||||||
raise ValueError("invalid path; should not end with a /")
|
|
||||||
if not Table.valid_path(path):
|
|
||||||
raise ValueError("path name is invalid or contains reserved words")
|
|
||||||
|
|
||||||
# Create the table's base dir. Note that we make a
|
|
||||||
# distinction here between NilmDB paths (always Unix style,
|
|
||||||
# split apart manually) and OS paths (built up with
|
|
||||||
# os.path.join)
|
|
||||||
|
|
||||||
# Make directories leading up to this one
|
|
||||||
elements = path.lstrip(b'/').split(b'/')
|
|
||||||
made_dirs = []
|
|
||||||
try:
|
|
||||||
# Make parent elements
|
|
||||||
for i in range(len(elements)):
|
|
||||||
ospath = os.path.join(self.root, *elements[0:i])
|
|
||||||
if Table.exists(ospath):
|
|
||||||
raise ValueError("path is subdir of existing node")
|
|
||||||
if not os.path.isdir(ospath):
|
|
||||||
os.mkdir(ospath)
|
|
||||||
made_dirs.append(ospath)
|
|
||||||
except Exception:
|
|
||||||
# Remove paths that we created
|
|
||||||
for ospath in reversed(made_dirs):
|
|
||||||
os.rmdir(ospath)
|
|
||||||
raise
|
|
||||||
|
|
||||||
return elements
|
|
||||||
|
|
||||||
def create(self, unicodepath, layout_name):
|
def create(self, unicodepath, layout_name):
|
||||||
"""
|
"""
|
||||||
|
@ -145,92 +74,70 @@ class BulkData():
|
||||||
|
|
||||||
layout_name: string for nilmdb.layout.get_named(), e.g. 'float32_8'
|
layout_name: string for nilmdb.layout.get_named(), e.g. 'float32_8'
|
||||||
"""
|
"""
|
||||||
elements = self._create_parents(unicodepath)
|
path = self._encode_filename(unicodepath)
|
||||||
|
|
||||||
|
if path[0] != '/':
|
||||||
|
raise ValueError("paths must start with /")
|
||||||
|
[ group, node ] = path.rsplit("/", 1)
|
||||||
|
if group == '':
|
||||||
|
raise ValueError("invalid path; path must contain at least one "
|
||||||
|
"folder")
|
||||||
|
|
||||||
|
# Get layout, and build format string for struct module
|
||||||
|
try:
|
||||||
|
layout = nilmdb.server.layout.get_named(layout_name)
|
||||||
|
struct_fmt = '<d' # Little endian, double timestamp
|
||||||
|
struct_mapping = {
|
||||||
|
"int8": 'b',
|
||||||
|
"uint8": 'B',
|
||||||
|
"int16": 'h',
|
||||||
|
"uint16": 'H',
|
||||||
|
"int32": 'i',
|
||||||
|
"uint32": 'I',
|
||||||
|
"int64": 'q',
|
||||||
|
"uint64": 'Q',
|
||||||
|
"float32": 'f',
|
||||||
|
"float64": 'd',
|
||||||
|
}
|
||||||
|
struct_fmt += struct_mapping[layout.datatype] * layout.count
|
||||||
|
except KeyError:
|
||||||
|
raise ValueError("no such layout, or bad data types")
|
||||||
|
|
||||||
|
# Create the table. Note that we make a distinction here
|
||||||
|
# between NilmDB paths (always Unix style, split apart
|
||||||
|
# manually) and OS paths (built up with os.path.join)
|
||||||
|
|
||||||
|
# Make directories leading up to this one
|
||||||
|
elements = path.lstrip('/').split('/')
|
||||||
|
for i in range(len(elements)):
|
||||||
|
ospath = os.path.join(self.root, *elements[0:i])
|
||||||
|
if Table.exists(ospath):
|
||||||
|
raise ValueError("path is subdir of existing node")
|
||||||
|
if not os.path.isdir(ospath):
|
||||||
|
os.mkdir(ospath)
|
||||||
|
|
||||||
# Make the final dir
|
# Make the final dir
|
||||||
ospath = os.path.join(self.root, *elements)
|
ospath = os.path.join(self.root, *elements)
|
||||||
self._create_check_ospath(ospath)
|
if os.path.isdir(ospath):
|
||||||
|
raise ValueError("subdirs of this path already exist")
|
||||||
os.mkdir(ospath)
|
os.mkdir(ospath)
|
||||||
|
|
||||||
try:
|
|
||||||
# Write format string to file
|
# Write format string to file
|
||||||
Table.create(ospath, layout_name, self.file_size,
|
Table.create(ospath, struct_fmt, self.file_size, self.files_per_dir)
|
||||||
self.files_per_dir)
|
|
||||||
|
|
||||||
# Open and cache it
|
# Open and cache it
|
||||||
self.getnode(unicodepath)
|
self.getnode(unicodepath)
|
||||||
except Exception:
|
|
||||||
exc_info = sys.exc_info()
|
|
||||||
try:
|
|
||||||
os.rmdir(ospath)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
raise exc_info[1].with_traceback(exc_info[2])
|
|
||||||
|
|
||||||
# Success
|
# Success
|
||||||
return
|
return
|
||||||
|
|
||||||
def _remove_leaves(self, unicodepath):
|
|
||||||
"""Remove empty directories starting at the leaves of unicodepath"""
|
|
||||||
path = self._encode_filename(unicodepath)
|
|
||||||
elements = path.lstrip(b'/').split(b'/')
|
|
||||||
for i in reversed(list(range(len(elements)))):
|
|
||||||
ospath = os.path.join(self.root, *elements[0:i+1])
|
|
||||||
try:
|
|
||||||
os.rmdir(ospath)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def rename(self, oldunicodepath, newunicodepath):
|
|
||||||
"""Move entire tree from 'oldunicodepath' to
|
|
||||||
'newunicodepath'"""
|
|
||||||
oldpath = self._encode_filename(oldunicodepath)
|
|
||||||
newpath = self._encode_filename(newunicodepath)
|
|
||||||
|
|
||||||
# Get OS paths
|
|
||||||
oldelements = oldpath.lstrip(b'/').split(b'/')
|
|
||||||
oldospath = os.path.join(self.root, *oldelements)
|
|
||||||
newelements = newpath.lstrip(b'/').split(b'/')
|
|
||||||
newospath = os.path.join(self.root, *newelements)
|
|
||||||
|
|
||||||
# Basic checks
|
|
||||||
if oldospath == newospath:
|
|
||||||
raise ValueError("old and new paths are the same")
|
|
||||||
|
|
||||||
# Remove Table object at old path from cache
|
|
||||||
self.getnode.cache_remove(self, oldunicodepath)
|
|
||||||
|
|
||||||
# Move the table to a temporary location
|
|
||||||
tmpdir = tempfile.mkdtemp(prefix=b"rename-", dir=self.root)
|
|
||||||
tmppath = os.path.join(tmpdir, b"table")
|
|
||||||
os.rename(oldospath, tmppath)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Check destination path
|
|
||||||
self._create_check_ospath(newospath)
|
|
||||||
|
|
||||||
# Create parent dirs for new location
|
|
||||||
self._create_parents(newunicodepath)
|
|
||||||
|
|
||||||
# Move table into new location
|
|
||||||
os.rename(tmppath, newospath)
|
|
||||||
except Exception:
|
|
||||||
# On failure, move the table back to original path
|
|
||||||
os.rename(tmppath, oldospath)
|
|
||||||
os.rmdir(tmpdir)
|
|
||||||
raise
|
|
||||||
|
|
||||||
# Prune old dirs
|
|
||||||
self._remove_leaves(oldunicodepath)
|
|
||||||
os.rmdir(tmpdir)
|
|
||||||
|
|
||||||
def destroy(self, unicodepath):
|
def destroy(self, unicodepath):
|
||||||
"""Fully remove all data at a particular path. No way to undo
|
"""Fully remove all data at a particular path. No way to undo
|
||||||
it! The group/path structure is removed, too."""
|
it! The group/path structure is removed, too."""
|
||||||
path = self._encode_filename(unicodepath)
|
path = self._encode_filename(unicodepath)
|
||||||
|
|
||||||
# Get OS path
|
# Get OS path
|
||||||
elements = path.lstrip(b'/').split(b'/')
|
elements = path.lstrip('/').split('/')
|
||||||
ospath = os.path.join(self.root, *elements)
|
ospath = os.path.join(self.root, *elements)
|
||||||
|
|
||||||
# Remove Table object from cache
|
# Remove Table object from cache
|
||||||
|
@ -245,8 +152,13 @@ class BulkData():
|
||||||
for name in dirs:
|
for name in dirs:
|
||||||
os.rmdir(os.path.join(root, name))
|
os.rmdir(os.path.join(root, name))
|
||||||
|
|
||||||
# Remove leftover empty directories
|
# Remove empty parent directories
|
||||||
self._remove_leaves(unicodepath)
|
for i in reversed(range(len(elements))):
|
||||||
|
ospath = os.path.join(self.root, *elements[0:i+1])
|
||||||
|
try:
|
||||||
|
os.rmdir(ospath)
|
||||||
|
except OSError:
|
||||||
|
break
|
||||||
|
|
||||||
# Cache open tables
|
# Cache open tables
|
||||||
@nilmdb.utils.lru_cache(size = table_cache_size,
|
@nilmdb.utils.lru_cache(size = table_cache_size,
|
||||||
|
@ -255,72 +167,115 @@ class BulkData():
|
||||||
"""Return a Table object corresponding to the given database
|
"""Return a Table object corresponding to the given database
|
||||||
path, which must exist."""
|
path, which must exist."""
|
||||||
path = self._encode_filename(unicodepath)
|
path = self._encode_filename(unicodepath)
|
||||||
elements = path.lstrip(b'/').split(b'/')
|
elements = path.lstrip('/').split('/')
|
||||||
ospath = os.path.join(self.root, *elements)
|
ospath = os.path.join(self.root, *elements)
|
||||||
return Table(ospath, self.initial_nrows)
|
return Table(ospath)
|
||||||
|
|
||||||
|
|
||||||
@nilmdb.utils.must_close(wrap_verify = False)
|
@nilmdb.utils.must_close(wrap_verify = False)
|
||||||
class Table():
|
class File(object):
|
||||||
|
"""Object representing a single file on disk. Data can be appended,
|
||||||
|
or the self.mmap handle can be used for random reads."""
|
||||||
|
|
||||||
|
def __init__(self, root, subdir, filename):
|
||||||
|
# Create path if it doesn't exist
|
||||||
|
try:
|
||||||
|
os.mkdir(os.path.join(root, subdir))
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Open/create file
|
||||||
|
self._f = open(os.path.join(root, subdir, filename), "a+b", 0)
|
||||||
|
|
||||||
|
# Seek to end, and get size
|
||||||
|
self._f.seek(0, 2)
|
||||||
|
self.size = self._f.tell()
|
||||||
|
|
||||||
|
# Open mmap object
|
||||||
|
self.mmap = None
|
||||||
|
self._mmap_reopen()
|
||||||
|
|
||||||
|
def _mmap_reopen(self):
|
||||||
|
if self.size == 0:
|
||||||
|
# Don't mmap if the file is empty; it would fail
|
||||||
|
pass
|
||||||
|
elif self.mmap is None:
|
||||||
|
# Not opened yet, so open it
|
||||||
|
self.mmap = mmap.mmap(self._f.fileno(), 0)
|
||||||
|
else:
|
||||||
|
# Already opened, so just resize it
|
||||||
|
self.mmap.resize(self.size)
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
if self.mmap is not None:
|
||||||
|
self.mmap.close()
|
||||||
|
self._f.close()
|
||||||
|
|
||||||
|
def append(self, data): # pragma: no cover (below version used instead)
|
||||||
|
# Write data, flush it, and resize our mmap accordingly
|
||||||
|
self._f.write(data)
|
||||||
|
self._f.flush()
|
||||||
|
self.size += len(data)
|
||||||
|
self._mmap_reopen()
|
||||||
|
|
||||||
|
def append_pack_iter(self, count, packer, dataiter):
|
||||||
|
# An optimized verison of append, to avoid flushing the file
|
||||||
|
# and resizing the mmap after each data point.
|
||||||
|
try:
|
||||||
|
rows = []
|
||||||
|
for i in xrange(count):
|
||||||
|
row = dataiter.next()
|
||||||
|
rows.append(packer(*row))
|
||||||
|
self._f.write("".join(rows))
|
||||||
|
finally:
|
||||||
|
self._f.flush()
|
||||||
|
self.size = self._f.tell()
|
||||||
|
self._mmap_reopen()
|
||||||
|
|
||||||
|
@nilmdb.utils.must_close(wrap_verify = False)
|
||||||
|
class Table(object):
|
||||||
"""Tools to help access a single table (data at a specific OS path)."""
|
"""Tools to help access a single table (data at a specific OS path)."""
|
||||||
# See design.md for design details
|
# See design.md for design details
|
||||||
|
|
||||||
# Class methods, to help keep format details in this class.
|
# Class methods, to help keep format details in this class.
|
||||||
@classmethod
|
|
||||||
def valid_path(cls, root):
|
|
||||||
"""Return True if a root path is a valid name"""
|
|
||||||
return b"_format" not in root.split(b"/")
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def exists(cls, root):
|
def exists(cls, root):
|
||||||
"""Return True if a table appears to exist at this OS path"""
|
"""Return True if a table appears to exist at this OS path"""
|
||||||
return os.path.isfile(os.path.join(root, b"_format"))
|
return os.path.isfile(os.path.join(root, "_format"))
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create(cls, root, layout, file_size, files_per_dir):
|
def create(cls, root, struct_fmt, file_size, files_per_dir):
|
||||||
"""Initialize a table at the given OS path with the
|
"""Initialize a table at the given OS path.
|
||||||
given layout string"""
|
'struct_fmt' is a Struct module format description"""
|
||||||
|
|
||||||
# Calculate rows per file so that each file is approximately
|
# Calculate rows per file so that each file is approximately
|
||||||
# file_size bytes.
|
# file_size bytes.
|
||||||
rkt = rocket.Rocket(layout, None)
|
packer = struct.Struct(struct_fmt)
|
||||||
rows_per_file = max(file_size // rkt.binary_size, 1)
|
rows_per_file = max(file_size // packer.size, 1)
|
||||||
rkt.close()
|
|
||||||
|
|
||||||
fmt = {
|
fmt = { "rows_per_file": rows_per_file,
|
||||||
"rows_per_file": rows_per_file,
|
|
||||||
"files_per_dir": files_per_dir,
|
"files_per_dir": files_per_dir,
|
||||||
"layout": layout,
|
"struct_fmt": struct_fmt,
|
||||||
"version": 3
|
"version": 1 }
|
||||||
}
|
with open(os.path.join(root, "_format"), "wb") as f:
|
||||||
nilmdb.utils.atomic.replace_file(
|
pickle.dump(fmt, f, 2)
|
||||||
os.path.join(root, b"_format"), pickle.dumps(fmt, 2))
|
|
||||||
|
|
||||||
# Normal methods
|
# Normal methods
|
||||||
def __init__(self, root, initial_nrows=0):
|
def __init__(self, root):
|
||||||
"""'root' is the full OS path to the directory of this table"""
|
"""'root' is the full OS path to the directory of this table"""
|
||||||
self.root = root
|
self.root = root
|
||||||
self.initial_nrows = initial_nrows
|
|
||||||
|
|
||||||
# Load the format
|
# Load the format and build packer
|
||||||
with open(os.path.join(self.root, b"_format"), "rb") as f:
|
with open(os.path.join(self.root, "_format"), "rb") as f:
|
||||||
fmt = pickle.load(f)
|
fmt = pickle.load(f)
|
||||||
|
|
||||||
if fmt["version"] != 3:
|
if fmt["version"] != 1: # pragma: no cover (just future proofing)
|
||||||
# Old versions used floating point timestamps, which aren't
|
raise NotImplementedError("version " + fmt["version"] +
|
||||||
# valid anymore.
|
" bulk data store not supported")
|
||||||
raise NotImplementedError("old version " + str(fmt["version"]) +
|
|
||||||
" bulk data store is not supported")
|
|
||||||
|
|
||||||
self.rows_per_file = fmt["rows_per_file"]
|
self.rows_per_file = fmt["rows_per_file"]
|
||||||
self.files_per_dir = fmt["files_per_dir"]
|
self.files_per_dir = fmt["files_per_dir"]
|
||||||
self.layout = fmt["layout"]
|
self.packer = struct.Struct(fmt["struct_fmt"])
|
||||||
|
self.file_size = self.packer.size * self.rows_per_file
|
||||||
# Use rocket to get row size and file size
|
|
||||||
rkt = rocket.Rocket(self.layout, None)
|
|
||||||
self.row_size = rkt.binary_size
|
|
||||||
self.file_size = rkt.binary_size * self.rows_per_file
|
|
||||||
rkt.close()
|
|
||||||
|
|
||||||
# Find nrows
|
# Find nrows
|
||||||
self.nrows = self._get_nrows()
|
self.nrows = self._get_nrows()
|
||||||
|
@ -336,20 +291,19 @@ class Table():
|
||||||
# greater than the row number of any piece of data that
|
# greater than the row number of any piece of data that
|
||||||
# currently exists, not necessarily all data that _ever_
|
# currently exists, not necessarily all data that _ever_
|
||||||
# existed.
|
# existed.
|
||||||
regex = re.compile(b"^[0-9a-f]{4,}$")
|
regex = re.compile("^[0-9a-f]{4,}$")
|
||||||
|
|
||||||
# Find the last directory. We sort and loop through all of them,
|
# Find the last directory. We sort and loop through all of them,
|
||||||
# starting with the numerically greatest, because the dirs could be
|
# starting with the numerically greatest, because the dirs could be
|
||||||
# empty if something was deleted but the directory was unexpectedly
|
# empty if something was deleted.
|
||||||
# not deleted.
|
|
||||||
subdirs = sorted(filter(regex.search, os.listdir(self.root)),
|
subdirs = sorted(filter(regex.search, os.listdir(self.root)),
|
||||||
key = lambda x: int(x, 16), reverse = True)
|
key = lambda x: int(x, 16), reverse = True)
|
||||||
|
|
||||||
for subdir in subdirs:
|
for subdir in subdirs:
|
||||||
# Now find the last file in that dir
|
# Now find the last file in that dir
|
||||||
path = os.path.join(self.root, subdir)
|
path = os.path.join(self.root, subdir)
|
||||||
files = list(filter(regex.search, os.listdir(path)))
|
files = filter(regex.search, os.listdir(path))
|
||||||
if not files:
|
if not files: # pragma: no cover (shouldn't occur)
|
||||||
# Empty dir: try the next one
|
# Empty dir: try the next one
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -360,14 +314,8 @@ class Table():
|
||||||
# Convert to row number
|
# Convert to row number
|
||||||
return self._row_from_offset(subdir, filename, offset)
|
return self._row_from_offset(subdir, filename, offset)
|
||||||
|
|
||||||
# No files, so no data. We typically start at row 0 in this
|
# No files, so no data
|
||||||
# case, although initial_nrows is specified during some tests
|
return 0
|
||||||
# to exercise other parts of the code better. Since we have
|
|
||||||
# no files yet, round initial_nrows up so it points to a row
|
|
||||||
# that would begin a new file.
|
|
||||||
nrows = ((self.initial_nrows + (self.rows_per_file - 1)) //
|
|
||||||
self.rows_per_file) * self.rows_per_file
|
|
||||||
return nrows
|
|
||||||
|
|
||||||
def _offset_from_row(self, row):
|
def _offset_from_row(self, row):
|
||||||
"""Return a (subdir, filename, offset, count) tuple:
|
"""Return a (subdir, filename, offset, count) tuple:
|
||||||
|
@ -380,188 +328,97 @@ class Table():
|
||||||
filenum = row // self.rows_per_file
|
filenum = row // self.rows_per_file
|
||||||
# It's OK if these format specifiers are too short; the filenames
|
# It's OK if these format specifiers are too short; the filenames
|
||||||
# will just get longer but will still sort correctly.
|
# will just get longer but will still sort correctly.
|
||||||
dirname = sprintf(b"%04x", filenum // self.files_per_dir)
|
dirname = sprintf("%04x", filenum // self.files_per_dir)
|
||||||
filename = sprintf(b"%04x", filenum % self.files_per_dir)
|
filename = sprintf("%04x", filenum % self.files_per_dir)
|
||||||
offset = (row % self.rows_per_file) * self.row_size
|
offset = (row % self.rows_per_file) * self.packer.size
|
||||||
count = self.rows_per_file - (row % self.rows_per_file)
|
count = self.rows_per_file - (row % self.rows_per_file)
|
||||||
return (dirname, filename, offset, count)
|
return (dirname, filename, offset, count)
|
||||||
|
|
||||||
def _row_from_offset(self, subdir, filename, offset):
|
def _row_from_offset(self, subdir, filename, offset):
|
||||||
"""Return the row number that corresponds to the given
|
"""Return the row number that corresponds to the given
|
||||||
'subdir/filename' and byte-offset within that file."""
|
'subdir/filename' and byte-offset within that file."""
|
||||||
if (offset % self.row_size) != 0:
|
if (offset % self.packer.size) != 0: # pragma: no cover; shouldn't occur
|
||||||
# this shouldn't occur, unless there is some corruption somewhere
|
|
||||||
raise ValueError("file offset is not a multiple of data size")
|
raise ValueError("file offset is not a multiple of data size")
|
||||||
filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16)
|
filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16)
|
||||||
row = (filenum * self.rows_per_file) + (offset // self.row_size)
|
row = (filenum * self.rows_per_file) + (offset // self.packer.size)
|
||||||
return row
|
return row
|
||||||
|
|
||||||
def _remove_or_truncate_file(self, subdir, filename, offset=0):
|
|
||||||
"""Remove the given file, and remove the subdirectory too
|
|
||||||
if it's empty. If offset is nonzero, truncate the file
|
|
||||||
to that size instead."""
|
|
||||||
# Close potentially open file in file_open LRU cache
|
|
||||||
self.file_open.cache_remove(self, subdir, filename)
|
|
||||||
if offset:
|
|
||||||
# Truncate it
|
|
||||||
with open(os.path.join(self.root, subdir, filename), "r+b") as f:
|
|
||||||
f.truncate(offset)
|
|
||||||
else:
|
|
||||||
# Remove file
|
|
||||||
os.remove(os.path.join(self.root, subdir, filename))
|
|
||||||
# Try deleting subdir, too
|
|
||||||
try:
|
|
||||||
os.rmdir(os.path.join(self.root, subdir))
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Cache open files
|
# Cache open files
|
||||||
@nilmdb.utils.lru_cache(size = fd_cache_size,
|
@nilmdb.utils.lru_cache(size = fd_cache_size,
|
||||||
onremove = lambda f: f.close())
|
onremove = lambda f: f.close())
|
||||||
def file_open(self, subdir, filename):
|
def file_open(self, subdir, filename):
|
||||||
"""Open and map a given 'subdir/filename' (relative to self.root).
|
"""Open and map a given 'subdir/filename' (relative to self.root).
|
||||||
Will be automatically closed when evicted from the cache."""
|
Will be automatically closed when evicted from the cache."""
|
||||||
# Create path if it doesn't exist
|
return File(self.root, subdir, filename)
|
||||||
try:
|
|
||||||
os.mkdir(os.path.join(self.root, subdir))
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
# Return a rocket.Rocket object, which contains the open file
|
|
||||||
return rocket.Rocket(self.layout,
|
|
||||||
os.path.join(self.root, subdir, filename))
|
|
||||||
|
|
||||||
def append_data(self, data, start, end, binary=False):
|
def append(self, data):
|
||||||
"""Parse the formatted string in 'data', according to the
|
"""Append the data and flush it to disk.
|
||||||
current layout, and append it to the table. If any timestamps
|
data is a nested Python list [[row],[row],[...]]"""
|
||||||
are non-monotonic, or don't fall between 'start' and 'end',
|
remaining = len(data)
|
||||||
a ValueError is raised.
|
dataiter = iter(data)
|
||||||
|
while remaining:
|
||||||
|
# See how many rows we can fit into the current file, and open it
|
||||||
|
(subdir, fname, offset, count) = self._offset_from_row(self.nrows)
|
||||||
|
if count > remaining:
|
||||||
|
count = remaining
|
||||||
|
|
||||||
Note that data is always of 'bytes' type.
|
|
||||||
|
|
||||||
If 'binary' is True, the data should be in raw binary format
|
|
||||||
instead: little-endian, matching the current table's layout,
|
|
||||||
including the int64 timestamp.
|
|
||||||
|
|
||||||
If this function succeeds, it returns normally. Otherwise,
|
|
||||||
the table is reverted back to its original state by truncating
|
|
||||||
or deleting files as necessary."""
|
|
||||||
data_offset = 0
|
|
||||||
last_timestamp = nilmdb.utils.time.min_timestamp
|
|
||||||
tot_rows = self.nrows
|
|
||||||
count = 0
|
|
||||||
linenum = 0
|
|
||||||
try:
|
|
||||||
while data_offset < len(data):
|
|
||||||
# See how many rows we can fit into the current file,
|
|
||||||
# and open it
|
|
||||||
(subdir, fname, offs, count) = self._offset_from_row(tot_rows)
|
|
||||||
f = self.file_open(subdir, fname)
|
f = self.file_open(subdir, fname)
|
||||||
|
|
||||||
# Ask the rocket object to parse and append up to "count"
|
# Write the data
|
||||||
# rows of data, verifying things along the way.
|
f.append_pack_iter(count, self.packer.pack, dataiter)
|
||||||
try:
|
remaining -= count
|
||||||
if binary:
|
self.nrows += count
|
||||||
appender = f.append_binary
|
|
||||||
else:
|
|
||||||
appender = f.append_string
|
|
||||||
(added_rows, data_offset, last_timestamp, linenum
|
|
||||||
) = appender(count, data, data_offset, linenum,
|
|
||||||
start, end, last_timestamp)
|
|
||||||
except rocket.ParseError as e:
|
|
||||||
(linenum, colnum, errtype, obj) = e.args
|
|
||||||
if binary:
|
|
||||||
where = "byte %d: " % (linenum)
|
|
||||||
else:
|
|
||||||
where = "line %d, column %d: " % (linenum, colnum)
|
|
||||||
# Extract out the error line, add column marker
|
|
||||||
try:
|
|
||||||
if binary:
|
|
||||||
raise IndexError
|
|
||||||
bad = data.splitlines()[linenum-1]
|
|
||||||
bad += b'\n' + b' ' * (colnum - 1) + b'^'
|
|
||||||
except IndexError:
|
|
||||||
bad = b""
|
|
||||||
if errtype == rocket.ERR_NON_MONOTONIC:
|
|
||||||
err = "timestamp is not monotonically increasing"
|
|
||||||
elif errtype == rocket.ERR_OUT_OF_INTERVAL:
|
|
||||||
if obj < start:
|
|
||||||
err = sprintf("Data timestamp %s < start time %s",
|
|
||||||
timestamp_to_string(obj),
|
|
||||||
timestamp_to_string(start))
|
|
||||||
else:
|
|
||||||
err = sprintf("Data timestamp %s >= end time %s",
|
|
||||||
timestamp_to_string(obj),
|
|
||||||
timestamp_to_string(end))
|
|
||||||
else:
|
|
||||||
err = str(obj)
|
|
||||||
bad_str = bad.decode('utf-8', errors='backslashreplace')
|
|
||||||
raise ValueError("error parsing input data: " +
|
|
||||||
where + err + "\n" + bad_str)
|
|
||||||
tot_rows += added_rows
|
|
||||||
except Exception:
|
|
||||||
# Some failure, so try to roll things back by truncating or
|
|
||||||
# deleting files that we may have appended data to.
|
|
||||||
cleanpos = self.nrows
|
|
||||||
while cleanpos <= tot_rows:
|
|
||||||
(subdir, fname, offs, count) = self._offset_from_row(cleanpos)
|
|
||||||
self._remove_or_truncate_file(subdir, fname, offs)
|
|
||||||
cleanpos += count
|
|
||||||
# Re-raise original exception
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
# Success, so update self.nrows accordingly
|
|
||||||
self.nrows = tot_rows
|
|
||||||
|
|
||||||
def get_data(self, start, stop, binary=False):
|
def __getitem__(self, key):
|
||||||
"""Extract data corresponding to Python range [n:m],
|
"""Extract data and return it. Supports simple indexing
|
||||||
and returns a formatted string"""
|
(table[n]) and range slices (table[n:m]). Returns a nested
|
||||||
if (start is None or stop is None or
|
Python list [[row],[row],[...]]"""
|
||||||
start > stop or start < 0 or stop > self.nrows):
|
|
||||||
raise IndexError("Index out of range")
|
# Handle simple slices
|
||||||
|
if isinstance(key, slice):
|
||||||
|
# Fall back to brute force if the slice isn't simple
|
||||||
|
if ((key.step is not None and key.step != 1) or
|
||||||
|
key.start is None or
|
||||||
|
key.stop is None or
|
||||||
|
key.start >= key.stop or
|
||||||
|
key.start < 0 or
|
||||||
|
key.stop > self.nrows):
|
||||||
|
return [ self[x] for x in xrange(*key.indices(self.nrows)) ]
|
||||||
|
|
||||||
ret = []
|
ret = []
|
||||||
row = start
|
row = key.start
|
||||||
remaining = stop - start
|
remaining = key.stop - key.start
|
||||||
while remaining > 0:
|
while remaining:
|
||||||
(subdir, filename, offset, count) = self._offset_from_row(row)
|
(subdir, filename, offset, count) = self._offset_from_row(row)
|
||||||
if count > remaining:
|
if count > remaining:
|
||||||
count = remaining
|
count = remaining
|
||||||
f = self.file_open(subdir, filename)
|
mm = self.file_open(subdir, filename).mmap
|
||||||
if binary:
|
for i in xrange(count):
|
||||||
ret.append(f.extract_binary(offset, count))
|
ret.append(list(self.packer.unpack_from(mm, offset)))
|
||||||
else:
|
offset += self.packer.size
|
||||||
ret.append(f.extract_string(offset, count))
|
|
||||||
remaining -= count
|
remaining -= count
|
||||||
row += count
|
row += count
|
||||||
return b"".join(ret)
|
return ret
|
||||||
|
|
||||||
def __getitem__(self, row):
|
# Handle single points
|
||||||
"""Extract timestamps from a row, with table[n] notation."""
|
if key < 0 or key >= self.nrows:
|
||||||
if row < 0 or row >= self.nrows:
|
|
||||||
raise IndexError("Index out of range")
|
raise IndexError("Index out of range")
|
||||||
(subdir, filename, offset, count) = self._offset_from_row(row)
|
(subdir, filename, offset, count) = self._offset_from_row(key)
|
||||||
f = self.file_open(subdir, filename)
|
mm = self.file_open(subdir, filename).mmap
|
||||||
return f.extract_timestamp(offset)
|
# unpack_from ignores the mmap object's current seek position
|
||||||
|
return list(self.packer.unpack_from(mm, offset))
|
||||||
|
|
||||||
def _remove_rows(self, subdir, filename, start, stop):
|
def _remove_rows(self, subdir, filename, start, stop):
|
||||||
"""Helper to mark specific rows as being removed from a
|
"""Helper to mark specific rows as being removed from a
|
||||||
file, and potentially remove or truncate the file itself."""
|
file, and potentially removing or truncating the file itself."""
|
||||||
# Close potentially open file in file_open LRU cache
|
# Import an existing list of deleted rows for this file
|
||||||
self.file_open.cache_remove(self, subdir, filename)
|
|
||||||
|
|
||||||
# We keep a file like 0000.removed that contains a list of
|
|
||||||
# which rows have been "removed". Note that we never have to
|
|
||||||
# remove entries from this list, because we never decrease
|
|
||||||
# self.nrows, and so we will never overwrite those locations in the
|
|
||||||
# file. Only when the list covers the entire extent of the
|
|
||||||
# file will that file be removed.
|
|
||||||
datafile = os.path.join(self.root, subdir, filename)
|
datafile = os.path.join(self.root, subdir, filename)
|
||||||
cachefile = datafile + b".removed"
|
cachefile = datafile + ".removed"
|
||||||
try:
|
try:
|
||||||
with open(cachefile, "rb") as f:
|
with open(cachefile, "rb") as f:
|
||||||
ranges = pickle.load(f)
|
ranges = pickle.load(f)
|
||||||
cachefile_present = True
|
cachefile_present = True
|
||||||
except Exception:
|
except:
|
||||||
ranges = []
|
ranges = []
|
||||||
cachefile_present = False
|
cachefile_present = False
|
||||||
|
|
||||||
|
@ -583,8 +440,7 @@ class Table():
|
||||||
# Not connected; append previous and start again
|
# Not connected; append previous and start again
|
||||||
merged.append(prev)
|
merged.append(prev)
|
||||||
prev = new
|
prev = new
|
||||||
# Last range we were looking at goes into the file. We know
|
if prev is not None:
|
||||||
# there was at least one (the one we just removed).
|
|
||||||
merged.append(prev)
|
merged.append(prev)
|
||||||
|
|
||||||
# If the range covered the whole file, we can delete it now.
|
# If the range covered the whole file, we can delete it now.
|
||||||
|
@ -595,19 +451,20 @@ class Table():
|
||||||
# are generally easier if we don't have to special-case that.
|
# are generally easier if we don't have to special-case that.
|
||||||
if (len(merged) == 1 and
|
if (len(merged) == 1 and
|
||||||
merged[0][0] == 0 and merged[0][1] == self.rows_per_file):
|
merged[0][0] == 0 and merged[0][1] == self.rows_per_file):
|
||||||
|
# Close potentially open file in file_open LRU cache
|
||||||
|
self.file_open.cache_remove(self, subdir, filename)
|
||||||
|
|
||||||
# Delete files
|
# Delete files
|
||||||
|
os.remove(datafile)
|
||||||
if cachefile_present:
|
if cachefile_present:
|
||||||
os.remove(cachefile)
|
os.remove(cachefile)
|
||||||
self._remove_or_truncate_file(subdir, filename, 0)
|
|
||||||
else:
|
|
||||||
# File needs to stick around. This means we can get
|
|
||||||
# degenerate cases where we have large files containing as
|
|
||||||
# little as one row. Try to punch a hole in the file,
|
|
||||||
# so that this region doesn't take up filesystem space.
|
|
||||||
offset = start * self.row_size
|
|
||||||
count = (stop - start) * self.row_size
|
|
||||||
nilmdb.utils.fallocate.punch_hole(datafile, offset, count)
|
|
||||||
|
|
||||||
|
# Try deleting subdir, too
|
||||||
|
try:
|
||||||
|
os.rmdir(os.path.join(self.root, subdir))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
# Update cache. Try to do it atomically.
|
# Update cache. Try to do it atomically.
|
||||||
nilmdb.utils.atomic.replace_file(cachefile,
|
nilmdb.utils.atomic.replace_file(cachefile,
|
||||||
pickle.dumps(merged, 2))
|
pickle.dumps(merged, 2))
|
||||||
|
@ -628,8 +485,16 @@ class Table():
|
||||||
(subdir, filename, offset, count) = self._offset_from_row(row)
|
(subdir, filename, offset, count) = self._offset_from_row(row)
|
||||||
if count > remaining:
|
if count > remaining:
|
||||||
count = remaining
|
count = remaining
|
||||||
row_offset = offset // self.row_size
|
row_offset = offset // self.packer.size
|
||||||
# Mark the rows as being removed
|
# Mark the rows as being removed
|
||||||
self._remove_rows(subdir, filename, row_offset, row_offset + count)
|
self._remove_rows(subdir, filename, row_offset, row_offset + count)
|
||||||
remaining -= count
|
remaining -= count
|
||||||
row += count
|
row += count
|
||||||
|
|
||||||
|
class TimestampOnlyTable(object):
|
||||||
|
"""Helper that lets us pass a Tables object into bisect, by
|
||||||
|
returning only the timestamp when a particular row is requested."""
|
||||||
|
def __init__(self, table):
|
||||||
|
self.table = table
|
||||||
|
def __getitem__(self, index):
|
||||||
|
return self.table[index][0]
|
||||||
|
|
|
@ -1,15 +1,12 @@
|
||||||
"""Exceptions"""
|
"""Exceptions"""
|
||||||
|
|
||||||
|
|
||||||
class NilmDBError(Exception):
|
class NilmDBError(Exception):
|
||||||
"""Base exception for NilmDB errors"""
|
"""Base exception for NilmDB errors"""
|
||||||
def __init__(self, msg="Unspecified error"):
|
def __init__(self, message = "Unspecified error"):
|
||||||
super().__init__(msg)
|
Exception.__init__(self, message)
|
||||||
|
|
||||||
|
|
||||||
class StreamError(NilmDBError):
|
class StreamError(NilmDBError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class OverlapError(NilmDBError):
|
class OverlapError(NilmDBError):
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -1,11 +1,5 @@
|
||||||
# cython: language_level=2
|
|
||||||
|
|
||||||
"""Interval, IntervalSet
|
"""Interval, IntervalSet
|
||||||
|
|
||||||
The Interval implemented here is just like
|
|
||||||
nilmdb.utils.interval.Interval, except implemented in Cython for
|
|
||||||
speed.
|
|
||||||
|
|
||||||
Represents an interval of time, and a set of such intervals.
|
Represents an interval of time, and a set of such intervals.
|
||||||
|
|
||||||
Intervals are half-open, ie. they include data points with timestamps
|
Intervals are half-open, ie. they include data points with timestamps
|
||||||
|
@ -25,54 +19,49 @@ Intervals are half-open, ie. they include data points with timestamps
|
||||||
# Fourth version is an optimized rb-tree that stores interval starts
|
# Fourth version is an optimized rb-tree that stores interval starts
|
||||||
# and ends directly in the tree, like bxinterval did.
|
# and ends directly in the tree, like bxinterval did.
|
||||||
|
|
||||||
from ..utils.time import min_timestamp as nilmdb_min_timestamp
|
|
||||||
from ..utils.time import max_timestamp as nilmdb_max_timestamp
|
|
||||||
from ..utils.time import timestamp_to_string
|
|
||||||
from ..utils.iterator import imerge
|
|
||||||
from ..utils.interval import IntervalError
|
|
||||||
import itertools
|
|
||||||
|
|
||||||
cimport rbtree
|
cimport rbtree
|
||||||
from libc.stdint cimport uint64_t, int64_t
|
cdef extern from "stdint.h":
|
||||||
|
ctypedef unsigned long long uint64_t
|
||||||
|
|
||||||
ctypedef int64_t timestamp_t
|
class IntervalError(Exception):
|
||||||
|
"""Error due to interval overlap, etc"""
|
||||||
|
pass
|
||||||
|
|
||||||
cdef class Interval:
|
cdef class Interval:
|
||||||
"""Represents an interval of time."""
|
"""Represents an interval of time."""
|
||||||
|
|
||||||
cdef public timestamp_t start, end
|
cdef public double start, end
|
||||||
|
|
||||||
def __init__(self, timestamp_t start, timestamp_t end):
|
def __init__(self, double start, double end):
|
||||||
"""
|
"""
|
||||||
'start' and 'end' are arbitrary numbers that represent time
|
'start' and 'end' are arbitrary floats that represent time
|
||||||
"""
|
"""
|
||||||
if start >= end:
|
if start >= end:
|
||||||
# Explicitly disallow zero-width intervals (since they're half-open)
|
# Explicitly disallow zero-width intervals (since they're half-open)
|
||||||
raise IntervalError("start %s must precede end %s" % (start, end))
|
raise IntervalError("start %s must precede end %s" % (start, end))
|
||||||
self.start = start
|
self.start = float(start)
|
||||||
self.end = end
|
self.end = float(end)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
s = repr(self.start) + ", " + repr(self.end)
|
s = repr(self.start) + ", " + repr(self.end)
|
||||||
return self.__class__.__name__ + "(" + s + ")"
|
return self.__class__.__name__ + "(" + s + ")"
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return ("[" + timestamp_to_string(self.start) +
|
return "[" + repr(self.start) + " -> " + repr(self.end) + ")"
|
||||||
" -> " + timestamp_to_string(self.end) + ")")
|
|
||||||
|
|
||||||
# Compare two intervals. If non-equal, order by start then end
|
def __cmp__(self, Interval other):
|
||||||
def __lt__(self, Interval other):
|
"""Compare two intervals. If non-equal, order by start then end"""
|
||||||
return (self.start, self.end) < (other.start, other.end)
|
if not isinstance(other, Interval):
|
||||||
def __gt__(self, Interval other):
|
raise TypeError("bad type")
|
||||||
return (self.start, self.end) > (other.start, other.end)
|
if self.start == other.start:
|
||||||
def __le__(self, Interval other):
|
if self.end < other.end:
|
||||||
return (self.start, self.end) <= (other.start, other.end)
|
return -1
|
||||||
def __ge__(self, Interval other):
|
if self.end > other.end:
|
||||||
return (self.start, self.end) >= (other.start, other.end)
|
return 1
|
||||||
def __eq__(self, Interval other):
|
return 0
|
||||||
return (self.start, self.end) == (other.start, other.end)
|
if self.start < other.start:
|
||||||
def __ne__(self, Interval other):
|
return -1
|
||||||
return (self.start, self.end) != (other.start, other.end)
|
return 1
|
||||||
|
|
||||||
cpdef intersects(self, Interval other):
|
cpdef intersects(self, Interval other):
|
||||||
"""Return True if two Interval objects intersect"""
|
"""Return True if two Interval objects intersect"""
|
||||||
|
@ -80,7 +69,7 @@ cdef class Interval:
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
cpdef subset(self, timestamp_t start, timestamp_t end):
|
cpdef subset(self, double start, double end):
|
||||||
"""Return a new Interval that is a subset of this one"""
|
"""Return a new Interval that is a subset of this one"""
|
||||||
# A subclass that tracks additional data might override this.
|
# A subclass that tracks additional data might override this.
|
||||||
if start < self.start or end > self.end:
|
if start < self.start or end > self.end:
|
||||||
|
@ -102,14 +91,14 @@ cdef class DBInterval(Interval):
|
||||||
db_end = 200, db_endpos = 20000
|
db_end = 200, db_endpos = 20000
|
||||||
"""
|
"""
|
||||||
|
|
||||||
cpdef public timestamp_t db_start, db_end
|
cpdef public double db_start, db_end
|
||||||
cpdef public uint64_t db_startpos, db_endpos
|
cpdef public uint64_t db_startpos, db_endpos
|
||||||
|
|
||||||
def __init__(self, start, end,
|
def __init__(self, start, end,
|
||||||
db_start, db_end,
|
db_start, db_end,
|
||||||
db_startpos, db_endpos):
|
db_startpos, db_endpos):
|
||||||
"""
|
"""
|
||||||
'db_start' and 'db_end' are arbitrary numbers that represent
|
'db_start' and 'db_end' are arbitrary floats that represent
|
||||||
time. They must be a strict superset of the time interval
|
time. They must be a strict superset of the time interval
|
||||||
covered by 'start' and 'end'. The 'db_startpos' and
|
covered by 'start' and 'end'. The 'db_startpos' and
|
||||||
'db_endpos' are arbitrary database position indicators that
|
'db_endpos' are arbitrary database position indicators that
|
||||||
|
@ -129,7 +118,7 @@ cdef class DBInterval(Interval):
|
||||||
s += ", " + repr(self.db_startpos) + ", " + repr(self.db_endpos)
|
s += ", " + repr(self.db_startpos) + ", " + repr(self.db_endpos)
|
||||||
return self.__class__.__name__ + "(" + s + ")"
|
return self.__class__.__name__ + "(" + s + ")"
|
||||||
|
|
||||||
cpdef subset(self, timestamp_t start, timestamp_t end):
|
cpdef subset(self, double start, double end):
|
||||||
"""
|
"""
|
||||||
Return a new DBInterval that is a subset of this one
|
Return a new DBInterval that is a subset of this one
|
||||||
"""
|
"""
|
||||||
|
@ -273,15 +262,21 @@ cdef class IntervalSet:
|
||||||
|
|
||||||
def __and__(self, other not None):
|
def __and__(self, other not None):
|
||||||
"""
|
"""
|
||||||
Compute a new IntervalSet from the intersection of this
|
Compute a new IntervalSet from the intersection of two others
|
||||||
IntervalSet with one other interval.
|
|
||||||
|
|
||||||
Output intervals are built as subsets of the intervals in the
|
Output intervals are built as subsets of the intervals in the
|
||||||
first argument (self).
|
first argument (self).
|
||||||
"""
|
"""
|
||||||
out = IntervalSet()
|
out = IntervalSet()
|
||||||
|
|
||||||
|
if not isinstance(other, IntervalSet):
|
||||||
for i in self.intersection(other):
|
for i in self.intersection(other):
|
||||||
out.tree.insert(rbtree.RBNode(i.start, i.end, i))
|
out.tree.insert(rbtree.RBNode(i.start, i.end, i))
|
||||||
|
else:
|
||||||
|
for x in other:
|
||||||
|
for i in self.intersection(x):
|
||||||
|
out.tree.insert(rbtree.RBNode(i.start, i.end, i))
|
||||||
|
|
||||||
return out
|
return out
|
||||||
|
|
||||||
def intersection(self, Interval interval not None, orig = False):
|
def intersection(self, Interval interval not None, orig = False):
|
||||||
|
@ -298,17 +293,22 @@ cdef class IntervalSet:
|
||||||
(potentially) subsetted to make the one that is being
|
(potentially) subsetted to make the one that is being
|
||||||
returned.
|
returned.
|
||||||
"""
|
"""
|
||||||
if orig:
|
if not isinstance(interval, Interval):
|
||||||
|
raise TypeError("bad type")
|
||||||
for n in self.tree.intersect(interval.start, interval.end):
|
for n in self.tree.intersect(interval.start, interval.end):
|
||||||
i = n.obj
|
i = n.obj
|
||||||
|
if i:
|
||||||
|
if i.start >= interval.start and i.end <= interval.end:
|
||||||
|
if orig:
|
||||||
|
yield (i, i)
|
||||||
|
else:
|
||||||
|
yield i
|
||||||
|
else:
|
||||||
subset = i.subset(max(i.start, interval.start),
|
subset = i.subset(max(i.start, interval.start),
|
||||||
min(i.end, interval.end))
|
min(i.end, interval.end))
|
||||||
|
if orig:
|
||||||
yield (subset, i)
|
yield (subset, i)
|
||||||
else:
|
else:
|
||||||
for n in self.tree.intersect(interval.start, interval.end):
|
|
||||||
i = n.obj
|
|
||||||
subset = i.subset(max(i.start, interval.start),
|
|
||||||
min(i.end, interval.end))
|
|
||||||
yield subset
|
yield subset
|
||||||
|
|
||||||
cpdef intersects(self, Interval other):
|
cpdef intersects(self, Interval other):
|
||||||
|
@ -318,7 +318,7 @@ cdef class IntervalSet:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def find_end(self, timestamp_t t):
|
def find_end(self, double t):
|
||||||
"""
|
"""
|
||||||
Return an Interval from this tree that ends at time t, or
|
Return an Interval from this tree that ends at time t, or
|
||||||
None if it doesn't exist.
|
None if it doesn't exist.
|
||||||
|
|
197
nilmdb/server/layout.pyx
Normal file
197
nilmdb/server/layout.pyx
Normal file
|
@ -0,0 +1,197 @@
|
||||||
|
# cython: profile=False
|
||||||
|
|
||||||
|
import time
|
||||||
|
import sys
|
||||||
|
import inspect
|
||||||
|
import cStringIO
|
||||||
|
|
||||||
|
cdef enum:
|
||||||
|
max_value_count = 64
|
||||||
|
|
||||||
|
cimport cython
|
||||||
|
cimport libc.stdlib
|
||||||
|
cimport libc.stdio
|
||||||
|
cimport libc.string
|
||||||
|
|
||||||
|
class ParserError(Exception):
|
||||||
|
def __init__(self, line, message):
|
||||||
|
self.message = "line " + str(line) + ": " + message
|
||||||
|
Exception.__init__(self, self.message)
|
||||||
|
|
||||||
|
class FormatterError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Layout:
|
||||||
|
"""Represents a NILM database layout"""
|
||||||
|
|
||||||
|
def __init__(self, typestring):
|
||||||
|
"""Initialize this Layout object to handle the specified
|
||||||
|
type string"""
|
||||||
|
try:
|
||||||
|
[ datatype, count ] = typestring.split("_")
|
||||||
|
except:
|
||||||
|
raise KeyError("invalid layout string")
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.count = int(count)
|
||||||
|
except ValueError:
|
||||||
|
raise KeyError("invalid count")
|
||||||
|
if self.count < 1 or self.count > max_value_count:
|
||||||
|
raise KeyError("invalid count")
|
||||||
|
|
||||||
|
if datatype == 'uint16':
|
||||||
|
self.parse = self.parse_uint16
|
||||||
|
self.format_str = "%.6f" + " %d" * self.count
|
||||||
|
self.format = self.format_generic
|
||||||
|
elif datatype == 'float32' or datatype == 'float64':
|
||||||
|
self.parse = self.parse_float64
|
||||||
|
self.format_str = "%.6f" + " %f" * self.count
|
||||||
|
self.format = self.format_generic
|
||||||
|
else:
|
||||||
|
raise KeyError("invalid type")
|
||||||
|
|
||||||
|
self.datatype = datatype
|
||||||
|
|
||||||
|
# Parsers
|
||||||
|
def parse_float64(self, char *text):
|
||||||
|
cdef int n
|
||||||
|
cdef double ts
|
||||||
|
# Return doubles even in float32 case, since they're going into
|
||||||
|
# a Python array which would upconvert to double anyway.
|
||||||
|
result = [0] * (self.count + 1)
|
||||||
|
cdef char *end
|
||||||
|
ts = libc.stdlib.strtod(text, &end)
|
||||||
|
if end == text:
|
||||||
|
raise ValueError("bad timestamp")
|
||||||
|
result[0] = ts
|
||||||
|
for n in range(self.count):
|
||||||
|
text = end
|
||||||
|
result[n+1] = libc.stdlib.strtod(text, &end)
|
||||||
|
if end == text:
|
||||||
|
raise ValueError("wrong number of values")
|
||||||
|
n = 0
|
||||||
|
while end[n] == ' ':
|
||||||
|
n += 1
|
||||||
|
if end[n] != '\n' and end[n] != '#' and end[n] != '\0':
|
||||||
|
raise ValueError("extra data on line")
|
||||||
|
return (ts, result)
|
||||||
|
|
||||||
|
def parse_uint16(self, char *text):
|
||||||
|
cdef int n
|
||||||
|
cdef double ts
|
||||||
|
cdef int v
|
||||||
|
cdef char *end
|
||||||
|
result = [0] * (self.count + 1)
|
||||||
|
ts = libc.stdlib.strtod(text, &end)
|
||||||
|
if end == text:
|
||||||
|
raise ValueError("bad timestamp")
|
||||||
|
result[0] = ts
|
||||||
|
for n in range(self.count):
|
||||||
|
text = end
|
||||||
|
v = libc.stdlib.strtol(text, &end, 10)
|
||||||
|
if v < 0 or v > 65535:
|
||||||
|
raise ValueError("value out of range")
|
||||||
|
result[n+1] = v
|
||||||
|
if end == text:
|
||||||
|
raise ValueError("wrong number of values")
|
||||||
|
n = 0
|
||||||
|
while end[n] == ' ':
|
||||||
|
n += 1
|
||||||
|
if end[n] != '\n' and end[n] != '#' and end[n] != '\0':
|
||||||
|
raise ValueError("extra data on line")
|
||||||
|
return (ts, result)
|
||||||
|
|
||||||
|
# Formatters
|
||||||
|
def format_generic(self, d):
|
||||||
|
n = len(d) - 1
|
||||||
|
if n != self.count:
|
||||||
|
raise ValueError("wrong number of values for layout type: "
|
||||||
|
"got %d, wanted %d" % (n, self.count))
|
||||||
|
return (self.format_str % tuple(d)) + "\n"
|
||||||
|
|
||||||
|
# Get a layout by name
|
||||||
|
def get_named(typestring):
|
||||||
|
try:
|
||||||
|
return Layout(typestring)
|
||||||
|
except KeyError:
|
||||||
|
compat = { "PrepData": "float32_8",
|
||||||
|
"RawData": "uint16_6",
|
||||||
|
"RawNotchedData": "uint16_9" }
|
||||||
|
return Layout(compat[typestring])
|
||||||
|
|
||||||
|
class Parser(object):
|
||||||
|
"""Object that parses and stores ASCII data for inclusion into the
|
||||||
|
database"""
|
||||||
|
|
||||||
|
def __init__(self, layout):
|
||||||
|
if issubclass(layout.__class__, Layout):
|
||||||
|
self.layout = layout
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
self.layout = get_named(layout)
|
||||||
|
except KeyError:
|
||||||
|
raise TypeError("unknown layout")
|
||||||
|
|
||||||
|
self.data = []
|
||||||
|
self.min_timestamp = None
|
||||||
|
self.max_timestamp = None
|
||||||
|
|
||||||
|
def parse(self, textdata):
|
||||||
|
"""
|
||||||
|
Parse the data, provided as lines of text, using the current
|
||||||
|
layout, into an internal data structure suitable for a
|
||||||
|
pytables 'table.append(parser.data)'.
|
||||||
|
"""
|
||||||
|
cdef double last_ts = -1e12, ts
|
||||||
|
cdef int n = 0, i
|
||||||
|
cdef char *line
|
||||||
|
|
||||||
|
indata = cStringIO.StringIO(textdata)
|
||||||
|
# Assume any parsing error is a real error.
|
||||||
|
# In the future we might want to skip completely empty lines,
|
||||||
|
# or partial lines right before EOF?
|
||||||
|
try:
|
||||||
|
self.data = []
|
||||||
|
for pyline in indata:
|
||||||
|
line = pyline
|
||||||
|
n += 1
|
||||||
|
if line[0] == '\#':
|
||||||
|
continue
|
||||||
|
(ts, row) = self.layout.parse(line)
|
||||||
|
if ts <= last_ts:
|
||||||
|
raise ValueError("timestamp is not "
|
||||||
|
"monotonically increasing")
|
||||||
|
last_ts = ts
|
||||||
|
self.data.append(row)
|
||||||
|
except (ValueError, IndexError, TypeError) as e:
|
||||||
|
raise ParserError(n, "error: " + e.message)
|
||||||
|
|
||||||
|
# Mark timestamp ranges
|
||||||
|
if len(self.data):
|
||||||
|
self.min_timestamp = self.data[0][0]
|
||||||
|
self.max_timestamp = self.data[-1][0]
|
||||||
|
|
||||||
|
class Formatter(object):
|
||||||
|
"""Object that formats database data into ASCII"""
|
||||||
|
|
||||||
|
def __init__(self, layout):
|
||||||
|
if issubclass(layout.__class__, Layout):
|
||||||
|
self.layout = layout
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
self.layout = get_named(layout)
|
||||||
|
except KeyError:
|
||||||
|
raise TypeError("unknown layout")
|
||||||
|
|
||||||
|
def format(self, data):
|
||||||
|
"""
|
||||||
|
Format raw data from the database, using the current layout,
|
||||||
|
as lines of ACSII text.
|
||||||
|
"""
|
||||||
|
text = cStringIO.StringIO()
|
||||||
|
try:
|
||||||
|
for row in data:
|
||||||
|
text.write(self.layout.format(row))
|
||||||
|
except (ValueError, IndexError, TypeError) as e:
|
||||||
|
raise FormatterError("formatting error: " + e.message)
|
||||||
|
return text.getvalue()
|
|
@ -7,20 +7,21 @@ Object that represents a NILM database file.
|
||||||
Manages both the SQL database and the table storage backend.
|
Manages both the SQL database and the table storage backend.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
# Need absolute_import so that "import nilmdb" won't pull in
|
||||||
import errno
|
# nilmdb.py, but will pull the parent nilmdb module instead.
|
||||||
import sqlite3
|
from __future__ import absolute_import
|
||||||
|
import nilmdb
|
||||||
import nilmdb.utils
|
from nilmdb.utils.printf import *
|
||||||
from nilmdb.utils.printf import printf
|
from nilmdb.server.interval import (Interval, DBInterval,
|
||||||
from nilmdb.utils.time import timestamp_to_bytes
|
IntervalSet, IntervalError)
|
||||||
|
|
||||||
from nilmdb.utils.interval import IntervalError
|
|
||||||
from nilmdb.server.interval import Interval, DBInterval, IntervalSet
|
|
||||||
|
|
||||||
from nilmdb.server import bulkdata
|
from nilmdb.server import bulkdata
|
||||||
from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
|
from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
import os
|
||||||
|
import errno
|
||||||
|
import bisect
|
||||||
|
|
||||||
# Note about performance and transactions:
|
# Note about performance and transactions:
|
||||||
#
|
#
|
||||||
# Committing a transaction in the default sync mode (PRAGMA synchronous=FULL)
|
# Committing a transaction in the default sync mode (PRAGMA synchronous=FULL)
|
||||||
|
@ -30,11 +31,13 @@ from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
|
||||||
# after a series of INSERT, SELECT, but before a CREATE TABLE or PRAGMA.
|
# after a series of INSERT, SELECT, but before a CREATE TABLE or PRAGMA.
|
||||||
# 3: at the end of an explicit transaction, e.g. "with self.con as con:"
|
# 3: at the end of an explicit transaction, e.g. "with self.con as con:"
|
||||||
#
|
#
|
||||||
# To speed things up, we can set 'PRAGMA synchronous=OFF'. Or, it
|
# To speed up testing, or if this transaction speed becomes an issue,
|
||||||
# seems that 'PRAGMA synchronous=NORMAL' and 'PRAGMA journal_mode=WAL'
|
# the sync=False option to NilmDB.__init__ will set PRAGMA synchronous=OFF.
|
||||||
# give an equivalent speedup more safely. That is what is used here.
|
|
||||||
|
|
||||||
|
# Don't touch old entries -- just add new ones.
|
||||||
_sql_schema_updates = {
|
_sql_schema_updates = {
|
||||||
0: {"next": 1, "sql": """
|
0: """
|
||||||
-- All streams
|
-- All streams
|
||||||
CREATE TABLE streams(
|
CREATE TABLE streams(
|
||||||
id INTEGER PRIMARY KEY, -- stream ID
|
id INTEGER PRIMARY KEY, -- stream ID
|
||||||
|
@ -58,47 +61,24 @@ _sql_schema_updates = {
|
||||||
end_pos INTEGER NOT NULL
|
end_pos INTEGER NOT NULL
|
||||||
);
|
);
|
||||||
CREATE INDEX _ranges_index ON ranges (stream_id, start_time, end_time);
|
CREATE INDEX _ranges_index ON ranges (stream_id, start_time, end_time);
|
||||||
"""},
|
""",
|
||||||
|
|
||||||
1: {"next": 3, "sql": """
|
1: """
|
||||||
-- Generic dictionary-type metadata that can be associated with a stream
|
-- Generic dictionary-type metadata that can be associated with a stream
|
||||||
CREATE TABLE metadata(
|
CREATE TABLE metadata(
|
||||||
stream_id INTEGER NOT NULL,
|
stream_id INTEGER NOT NULL,
|
||||||
key TEXT NOT NULL,
|
key TEXT NOT NULL,
|
||||||
value TEXT
|
value TEXT
|
||||||
);
|
);
|
||||||
"""},
|
""",
|
||||||
|
|
||||||
2: {"error": "old format with floating-point timestamps requires "
|
|
||||||
"nilmdb 1.3.1 or older"},
|
|
||||||
|
|
||||||
3: {"next": None},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@nilmdb.utils.must_close()
|
@nilmdb.utils.must_close()
|
||||||
class NilmDB():
|
class NilmDB(object):
|
||||||
verbose = 0
|
verbose = 0
|
||||||
|
|
||||||
def __init__(self, basepath,
|
def __init__(self, basepath, sync=True, max_results=None,
|
||||||
max_results=None,
|
|
||||||
max_removals=None,
|
|
||||||
max_int_removals=None,
|
|
||||||
bulkdata_args=None):
|
bulkdata_args=None):
|
||||||
"""Initialize NilmDB at the given basepath.
|
|
||||||
Other arguments are for debugging / testing:
|
|
||||||
|
|
||||||
'max_results' is the max rows to send in a single
|
|
||||||
stream_intervals or stream_extract response.
|
|
||||||
|
|
||||||
'max_removals' is the max rows to delete at once
|
|
||||||
in stream_remove.
|
|
||||||
|
|
||||||
'max_int_removals' is the max intervals to delete
|
|
||||||
at once in stream_remove.
|
|
||||||
|
|
||||||
'bulkdata_args' is kwargs for the bulkdata module.
|
|
||||||
"""
|
|
||||||
if bulkdata_args is None:
|
if bulkdata_args is None:
|
||||||
bulkdata_args = {}
|
bulkdata_args = {}
|
||||||
|
|
||||||
|
@ -118,25 +98,20 @@ class NilmDB():
|
||||||
# SQLite database too
|
# SQLite database too
|
||||||
sqlfilename = os.path.join(self.basepath, "data.sql")
|
sqlfilename = os.path.join(self.basepath, "data.sql")
|
||||||
self.con = sqlite3.connect(sqlfilename, check_same_thread = True)
|
self.con = sqlite3.connect(sqlfilename, check_same_thread = True)
|
||||||
try:
|
|
||||||
self._sql_schema_update()
|
self._sql_schema_update()
|
||||||
except Exception:
|
|
||||||
self.data.close()
|
|
||||||
raise
|
|
||||||
|
|
||||||
# See big comment at top about the performance implications of this
|
# See big comment at top about the performance implications of this
|
||||||
self.con.execute("PRAGMA synchronous=NORMAL")
|
if sync:
|
||||||
self.con.execute("PRAGMA journal_mode=WAL")
|
self.con.execute("PRAGMA synchronous=FULL")
|
||||||
|
else:
|
||||||
|
self.con.execute("PRAGMA synchronous=OFF")
|
||||||
|
|
||||||
# Approximate largest number of elements that we want to send
|
# Approximate largest number of elements that we want to send
|
||||||
# in a single reply (for stream_intervals, stream_extract).
|
# in a single reply (for stream_intervals, stream_extract)
|
||||||
self.max_results = max_results or 16384
|
if max_results:
|
||||||
|
self.max_results = max_results
|
||||||
# Remove up to this many rows per call to stream_remove.
|
else:
|
||||||
self.max_removals = max_removals or 1048576
|
self.max_results = 16384
|
||||||
|
|
||||||
# Remove up to this many intervals per call to stream_remove.
|
|
||||||
self.max_int_removals = max_int_removals or 4096
|
|
||||||
|
|
||||||
def get_basepath(self):
|
def get_basepath(self):
|
||||||
return self.basepath
|
return self.basepath
|
||||||
|
@ -145,7 +120,6 @@ class NilmDB():
|
||||||
if self.con:
|
if self.con:
|
||||||
self.con.commit()
|
self.con.commit()
|
||||||
self.con.close()
|
self.con.close()
|
||||||
self.con = None
|
|
||||||
self.data.close()
|
self.data.close()
|
||||||
|
|
||||||
def _sql_schema_update(self):
|
def _sql_schema_update(self):
|
||||||
|
@ -153,20 +127,11 @@ class NilmDB():
|
||||||
version = cur.execute("PRAGMA user_version").fetchone()[0]
|
version = cur.execute("PRAGMA user_version").fetchone()[0]
|
||||||
oldversion = version
|
oldversion = version
|
||||||
|
|
||||||
while True:
|
while version in _sql_schema_updates:
|
||||||
if version not in _sql_schema_updates:
|
cur.executescript(_sql_schema_updates[version])
|
||||||
raise Exception(self.basepath + ": unknown database version "
|
version = version + 1
|
||||||
+ str(version))
|
if self.verbose: # pragma: no cover
|
||||||
update = _sql_schema_updates[version]
|
printf("Schema updated to %d\n", version)
|
||||||
if "error" in update:
|
|
||||||
raise Exception(self.basepath + ": can't use database version "
|
|
||||||
+ str(version) + ": " + update["error"])
|
|
||||||
if update["next"] is None:
|
|
||||||
break
|
|
||||||
cur.executescript(update["sql"])
|
|
||||||
version = update["next"]
|
|
||||||
if self.verbose:
|
|
||||||
printf("Database schema updated to %d\n", version)
|
|
||||||
|
|
||||||
if version != oldversion:
|
if version != oldversion:
|
||||||
with self.con:
|
with self.con:
|
||||||
|
@ -174,14 +139,14 @@ class NilmDB():
|
||||||
|
|
||||||
def _check_user_times(self, start, end):
|
def _check_user_times(self, start, end):
|
||||||
if start is None:
|
if start is None:
|
||||||
start = nilmdb.utils.time.min_timestamp
|
start = -1e12
|
||||||
if end is None:
|
if end is None:
|
||||||
end = nilmdb.utils.time.max_timestamp
|
end = 1e12
|
||||||
if start >= end:
|
if start >= end:
|
||||||
raise NilmDBError("start must precede end")
|
raise NilmDBError("start must precede end")
|
||||||
return (start, end)
|
return (start, end)
|
||||||
|
|
||||||
@nilmdb.utils.lru_cache(size=64)
|
@nilmdb.utils.lru_cache(size = 16)
|
||||||
def _get_intervals(self, stream_id):
|
def _get_intervals(self, stream_id):
|
||||||
"""
|
"""
|
||||||
Return a mutable IntervalSet corresponding to the given stream ID.
|
Return a mutable IntervalSet corresponding to the given stream ID.
|
||||||
|
@ -196,7 +161,7 @@ class NilmDB():
|
||||||
iset += DBInterval(start_time, end_time,
|
iset += DBInterval(start_time, end_time,
|
||||||
start_time, end_time,
|
start_time, end_time,
|
||||||
start_pos, end_pos)
|
start_pos, end_pos)
|
||||||
except IntervalError:
|
except IntervalError: # pragma: no cover
|
||||||
raise NilmDBError("unexpected overlap in ranges table!")
|
raise NilmDBError("unexpected overlap in ranges table!")
|
||||||
|
|
||||||
return iset
|
return iset
|
||||||
|
@ -223,6 +188,10 @@ class NilmDB():
|
||||||
# Load this stream's intervals
|
# Load this stream's intervals
|
||||||
iset = self._get_intervals(stream_id)
|
iset = self._get_intervals(stream_id)
|
||||||
|
|
||||||
|
# Check for overlap
|
||||||
|
if iset.intersects(interval): # pragma: no cover (gets caught earlier)
|
||||||
|
raise NilmDBError("new interval overlaps existing data")
|
||||||
|
|
||||||
# Check for adjacency. If there's a stream in the database
|
# Check for adjacency. If there's a stream in the database
|
||||||
# that ends exactly when this one starts, and the database
|
# that ends exactly when this one starts, and the database
|
||||||
# rows match up, we can make one interval that covers the
|
# rows match up, we can make one interval that covers the
|
||||||
|
@ -265,6 +234,10 @@ class NilmDB():
|
||||||
original: original DBInterval; must be already present in DB
|
original: original DBInterval; must be already present in DB
|
||||||
to_remove: DBInterval to remove; must be subset of 'original'
|
to_remove: DBInterval to remove; must be subset of 'original'
|
||||||
"""
|
"""
|
||||||
|
# Just return if we have nothing to remove
|
||||||
|
if remove.start == remove.end: # pragma: no cover
|
||||||
|
return
|
||||||
|
|
||||||
# Load this stream's intervals
|
# Load this stream's intervals
|
||||||
iset = self._get_intervals(stream_id)
|
iset = self._get_intervals(stream_id)
|
||||||
|
|
||||||
|
@ -279,8 +252,7 @@ class NilmDB():
|
||||||
# the removed piece was in the middle.
|
# the removed piece was in the middle.
|
||||||
def add(iset, start, end, start_pos, end_pos):
|
def add(iset, start, end, start_pos, end_pos):
|
||||||
iset += DBInterval(start, end, start, end, start_pos, end_pos)
|
iset += DBInterval(start, end, start, end, start_pos, end_pos)
|
||||||
self._sql_interval_insert(stream_id, start, end,
|
self._sql_interval_insert(stream_id, start, end, start_pos, end_pos)
|
||||||
start_pos, end_pos)
|
|
||||||
|
|
||||||
if original.start != remove.start:
|
if original.start != remove.start:
|
||||||
# Interval before the removed region
|
# Interval before the removed region
|
||||||
|
@ -297,7 +269,7 @@ class NilmDB():
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
def stream_list(self, path=None, layout=None, extended=False):
|
def stream_list(self, path = None, layout = None, extent = False):
|
||||||
"""Return list of lists of all streams in the database.
|
"""Return list of lists of all streams in the database.
|
||||||
|
|
||||||
If path is specified, include only streams with a path that
|
If path is specified, include only streams with a path that
|
||||||
|
@ -306,26 +278,19 @@ class NilmDB():
|
||||||
If layout is specified, include only streams with a layout
|
If layout is specified, include only streams with a layout
|
||||||
that matches the given string.
|
that matches the given string.
|
||||||
|
|
||||||
If extended=False, returns a list of lists containing
|
If extent = False, returns a list of lists containing
|
||||||
the path and layout: [ path, layout ]
|
the path and layout: [ path, layout ]
|
||||||
|
|
||||||
If extended=True, returns a list of lists containing
|
If extent = True, returns a list of lists containing the
|
||||||
more information:
|
path, layout, and min/max extent of the data:
|
||||||
path
|
[ path, layout, extent_min, extent_max ]
|
||||||
layout
|
|
||||||
interval_min (earliest interval start)
|
|
||||||
interval_max (latest interval end)
|
|
||||||
rows (total number of rows of data)
|
|
||||||
time (total time covered by this stream, in timestamp units)
|
|
||||||
"""
|
"""
|
||||||
params = ()
|
params = ()
|
||||||
query = "SELECT streams.path, streams.layout"
|
query = "SELECT streams.path, streams.layout"
|
||||||
if extended:
|
if extent:
|
||||||
query += ", min(ranges.start_time), max(ranges.end_time)"
|
query += ", min(ranges.start_time), max(ranges.end_time)"
|
||||||
query += ", coalesce(sum(ranges.end_pos - ranges.start_pos), 0) "
|
|
||||||
query += ", coalesce(sum(ranges.end_time - ranges.start_time), 0) "
|
|
||||||
query += " FROM streams"
|
query += " FROM streams"
|
||||||
if extended:
|
if extent:
|
||||||
query += " LEFT JOIN ranges ON streams.id = ranges.stream_id"
|
query += " LEFT JOIN ranges ON streams.id = ranges.stream_id"
|
||||||
query += " WHERE 1=1"
|
query += " WHERE 1=1"
|
||||||
if layout is not None:
|
if layout is not None:
|
||||||
|
@ -338,45 +303,31 @@ class NilmDB():
|
||||||
result = self.con.execute(query, params).fetchall()
|
result = self.con.execute(query, params).fetchall()
|
||||||
return [ list(x) for x in result ]
|
return [ list(x) for x in result ]
|
||||||
|
|
||||||
def stream_intervals(self, path, start=None, end=None, diffpath=None):
|
def stream_intervals(self, path, start = None, end = None):
|
||||||
"""
|
"""
|
||||||
List all intervals in 'path' between 'start' and 'end'. If
|
|
||||||
'diffpath' is not none, list instead the set-difference
|
|
||||||
between the intervals in the two streams; i.e. all interval
|
|
||||||
ranges that are present in 'path' but not 'diffpath'.
|
|
||||||
|
|
||||||
Returns (intervals, restart) tuple.
|
Returns (intervals, restart) tuple.
|
||||||
|
|
||||||
'intervals' is a list of [start,end] timestamps of all intervals
|
intervals is a list of [start,end] timestamps of all intervals
|
||||||
that exist for path, between start and end.
|
that exist for path, between start and end.
|
||||||
|
|
||||||
'restart', if not None, means that there were too many results
|
restart, if nonzero, means that there were too many results to
|
||||||
to return in a single request. The data is complete from the
|
return in a single request. The data is complete from the
|
||||||
starting timestamp to the point at which it was truncated, and
|
starting timestamp to the point at which it was truncated,
|
||||||
a new request with a start time of 'restart' will fetch the
|
and a new request with a start time of 'restart' will fetch
|
||||||
next block of data.
|
the next block of data.
|
||||||
"""
|
"""
|
||||||
stream_id = self._stream_id(path)
|
stream_id = self._stream_id(path)
|
||||||
intervals = self._get_intervals(stream_id)
|
intervals = self._get_intervals(stream_id)
|
||||||
if diffpath:
|
|
||||||
diffstream_id = self._stream_id(diffpath)
|
|
||||||
diffintervals = self._get_intervals(diffstream_id)
|
|
||||||
(start, end) = self._check_user_times(start, end)
|
(start, end) = self._check_user_times(start, end)
|
||||||
requested = Interval(start, end)
|
requested = Interval(start, end)
|
||||||
result = []
|
result = []
|
||||||
if diffpath:
|
for n, i in enumerate(intervals.intersection(requested)):
|
||||||
getter = nilmdb.utils.interval.set_difference(
|
|
||||||
intervals.intersection(requested),
|
|
||||||
diffintervals.intersection(requested))
|
|
||||||
else:
|
|
||||||
getter = intervals.intersection(requested)
|
|
||||||
for n, i in enumerate(getter):
|
|
||||||
if n >= self.max_results:
|
if n >= self.max_results:
|
||||||
restart = i.start
|
restart = i.start
|
||||||
break
|
break
|
||||||
result.append([i.start, i.end])
|
result.append([i.start, i.end])
|
||||||
else:
|
else:
|
||||||
restart = None
|
restart = 0
|
||||||
return (result, restart)
|
return (result, restart)
|
||||||
|
|
||||||
def stream_create(self, path, layout_name):
|
def stream_create(self, path, layout_name):
|
||||||
|
@ -410,8 +361,8 @@ class NilmDB():
|
||||||
|
|
||||||
def stream_set_metadata(self, path, data):
|
def stream_set_metadata(self, path, data):
|
||||||
"""Set stream metadata from a dictionary, e.g.
|
"""Set stream metadata from a dictionary, e.g.
|
||||||
{ description: 'Downstairs lighting',
|
{ description = 'Downstairs lighting',
|
||||||
v_scaling: 123.45 }
|
v_scaling = 123.45 }
|
||||||
This replaces all existing metadata.
|
This replaces all existing metadata.
|
||||||
"""
|
"""
|
||||||
stream_id = self._stream_id(path)
|
stream_id = self._stream_id(path)
|
||||||
|
@ -439,50 +390,30 @@ class NilmDB():
|
||||||
data.update(newdata)
|
data.update(newdata)
|
||||||
self.stream_set_metadata(path, data)
|
self.stream_set_metadata(path, data)
|
||||||
|
|
||||||
def stream_rename(self, oldpath, newpath):
|
|
||||||
"""Rename a stream."""
|
|
||||||
stream_id = self._stream_id(oldpath)
|
|
||||||
|
|
||||||
# Rename the data
|
|
||||||
self.data.rename(oldpath, newpath)
|
|
||||||
|
|
||||||
# Rename the stream in the database
|
|
||||||
with self.con as con:
|
|
||||||
con.execute("UPDATE streams SET path=? WHERE id=?",
|
|
||||||
(newpath, stream_id))
|
|
||||||
|
|
||||||
def stream_destroy(self, path):
|
def stream_destroy(self, path):
|
||||||
"""Fully remove a table from the database. Fails if there are
|
"""Fully remove a table and all of its data from the database.
|
||||||
any intervals data present; remove them first. Metadata is
|
No way to undo it! Metadata is removed."""
|
||||||
also removed."""
|
|
||||||
stream_id = self._stream_id(path)
|
stream_id = self._stream_id(path)
|
||||||
|
|
||||||
# Verify that no intervals are present, and clear the cache
|
# Delete the cached interval data (if it was cached)
|
||||||
iset = self._get_intervals(stream_id)
|
|
||||||
if iset:
|
|
||||||
raise NilmDBError("all intervals must be removed before "
|
|
||||||
"destroying a stream")
|
|
||||||
self._get_intervals.cache_remove(self, stream_id)
|
self._get_intervals.cache_remove(self, stream_id)
|
||||||
|
|
||||||
# Delete the bulkdata storage
|
# Delete the data
|
||||||
self.data.destroy(path)
|
self.data.destroy(path)
|
||||||
|
|
||||||
# Delete metadata, stream, intervals (should be none)
|
# Delete metadata, stream, intervals
|
||||||
with self.con as con:
|
with self.con as con:
|
||||||
con.execute("DELETE FROM metadata WHERE stream_id=?", (stream_id,))
|
con.execute("DELETE FROM metadata WHERE stream_id=?", (stream_id,))
|
||||||
con.execute("DELETE FROM ranges WHERE stream_id=?", (stream_id,))
|
con.execute("DELETE FROM ranges WHERE stream_id=?", (stream_id,))
|
||||||
con.execute("DELETE FROM streams WHERE id=?", (stream_id,))
|
con.execute("DELETE FROM streams WHERE id=?", (stream_id,))
|
||||||
|
|
||||||
def stream_insert(self, path, start, end, data, binary=False):
|
def stream_insert(self, path, start, end, data):
|
||||||
"""Insert new data into the database.
|
"""Insert new data into the database.
|
||||||
path: Path at which to add the data
|
path: Path at which to add the data
|
||||||
start: Starting timestamp
|
start: Starting timestamp
|
||||||
end: Ending timestamp
|
end: Ending timestamp
|
||||||
data: Textual data, formatted according to the layout of path
|
data: Rows of data, to be passed to bulkdata table.append
|
||||||
|
method. E.g. nilmdb.layout.Parser.data
|
||||||
'binary', if True, means that 'data' is raw binary:
|
|
||||||
little-endian, matching the current table's layout,
|
|
||||||
including the int64 timestamp.
|
|
||||||
"""
|
"""
|
||||||
# First check for basic overlap using timestamp info given.
|
# First check for basic overlap using timestamp info given.
|
||||||
stream_id = self._stream_id(path)
|
stream_id = self._stream_id(path)
|
||||||
|
@ -492,11 +423,10 @@ class NilmDB():
|
||||||
raise OverlapError("new data overlaps existing data at range: "
|
raise OverlapError("new data overlaps existing data at range: "
|
||||||
+ str(iset & interval))
|
+ str(iset & interval))
|
||||||
|
|
||||||
# Tenatively append the data. This will raise a ValueError if
|
# Insert the data
|
||||||
# there are any parse errors.
|
|
||||||
table = self.data.getnode(path)
|
table = self.data.getnode(path)
|
||||||
row_start = table.nrows
|
row_start = table.nrows
|
||||||
table.append_data(data, start, end, binary)
|
table.append(data)
|
||||||
row_end = table.nrows
|
row_end = table.nrows
|
||||||
|
|
||||||
# Insert the record into the sql database.
|
# Insert the record into the sql database.
|
||||||
|
@ -505,17 +435,6 @@ class NilmDB():
|
||||||
# And that's all
|
# And that's all
|
||||||
return
|
return
|
||||||
|
|
||||||
def _bisect_left(self, a, x, lo, hi):
|
|
||||||
# Like bisect.bisect_left, but doesn't choke on large indices on
|
|
||||||
# 32-bit systems, like bisect's fast C implementation does.
|
|
||||||
while lo < hi:
|
|
||||||
mid = (lo + hi) // 2
|
|
||||||
if a[mid] < x:
|
|
||||||
lo = mid + 1
|
|
||||||
else:
|
|
||||||
hi = mid
|
|
||||||
return lo
|
|
||||||
|
|
||||||
def _find_start(self, table, dbinterval):
|
def _find_start(self, table, dbinterval):
|
||||||
"""
|
"""
|
||||||
Given a DBInterval, find the row in the database that
|
Given a DBInterval, find the row in the database that
|
||||||
|
@ -526,7 +445,7 @@ class NilmDB():
|
||||||
# Optimization for the common case where an interval wasn't truncated
|
# Optimization for the common case where an interval wasn't truncated
|
||||||
if dbinterval.start == dbinterval.db_start:
|
if dbinterval.start == dbinterval.db_start:
|
||||||
return dbinterval.db_startpos
|
return dbinterval.db_startpos
|
||||||
return self._bisect_left(table,
|
return bisect.bisect_left(bulkdata.TimestampOnlyTable(table),
|
||||||
dbinterval.start,
|
dbinterval.start,
|
||||||
dbinterval.db_startpos,
|
dbinterval.db_startpos,
|
||||||
dbinterval.db_endpos)
|
dbinterval.db_endpos)
|
||||||
|
@ -545,36 +464,29 @@ class NilmDB():
|
||||||
# want to include the given timestamp in the results. This is
|
# want to include the given timestamp in the results. This is
|
||||||
# so a queries like 1:00 -> 2:00 and 2:00 -> 3:00 return
|
# so a queries like 1:00 -> 2:00 and 2:00 -> 3:00 return
|
||||||
# non-overlapping data.
|
# non-overlapping data.
|
||||||
return self._bisect_left(table,
|
return bisect.bisect_left(bulkdata.TimestampOnlyTable(table),
|
||||||
dbinterval.end,
|
dbinterval.end,
|
||||||
dbinterval.db_startpos,
|
dbinterval.db_startpos,
|
||||||
dbinterval.db_endpos)
|
dbinterval.db_endpos)
|
||||||
|
|
||||||
def stream_extract(self, path, start=None, end=None,
|
def stream_extract(self, path, start = None, end = None, count = False):
|
||||||
count=False, markup=False, binary=False):
|
|
||||||
"""
|
"""
|
||||||
Returns (data, restart) tuple.
|
Returns (data, restart) tuple.
|
||||||
|
|
||||||
'data' is ASCII-formatted data from the database, formatted
|
data is a list of raw data from the database, suitable for
|
||||||
according to the layout of the stream.
|
passing to e.g. nilmdb.layout.Formatter to translate into
|
||||||
|
textual form.
|
||||||
|
|
||||||
'restart', if not None, means that there were too many results to
|
restart, if nonzero, means that there were too many results to
|
||||||
return in a single request. The data is complete from the
|
return in a single request. The data is complete from the
|
||||||
starting timestamp to the point at which it was truncated,
|
starting timestamp to the point at which it was truncated,
|
||||||
and a new request with a start time of 'restart' will fetch
|
and a new request with a start time of 'restart' will fetch
|
||||||
the next block of data.
|
the next block of data.
|
||||||
|
|
||||||
'count', if true, means to not return raw data, but just the count
|
count, if true, means to not return raw data, but just the count
|
||||||
of rows that would have been returned. This is much faster
|
of rows that would have been returned. This is much faster
|
||||||
than actually fetching the data. It is not limited by
|
than actually fetching the data. It is not limited by
|
||||||
max_results.
|
max_results.
|
||||||
|
|
||||||
'markup', if true, indicates that returned data should be
|
|
||||||
marked with a comment denoting when a particular interval
|
|
||||||
starts, and another comment when an interval ends.
|
|
||||||
|
|
||||||
'binary', if true, means to return raw binary rather than
|
|
||||||
ASCII-formatted data.
|
|
||||||
"""
|
"""
|
||||||
stream_id = self._stream_id(path)
|
stream_id = self._stream_id(path)
|
||||||
table = self.data.getnode(path)
|
table = self.data.getnode(path)
|
||||||
|
@ -584,9 +496,7 @@ class NilmDB():
|
||||||
result = []
|
result = []
|
||||||
matched = 0
|
matched = 0
|
||||||
remaining = self.max_results
|
remaining = self.max_results
|
||||||
restart = None
|
restart = 0
|
||||||
if binary and (markup or count):
|
|
||||||
raise NilmDBError("binary mode can't be used with markup or count")
|
|
||||||
for interval in intervals.intersection(requested):
|
for interval in intervals.intersection(requested):
|
||||||
# Reading single rows from the table is too slow, so
|
# Reading single rows from the table is too slow, so
|
||||||
# we use two bisections to find both the starting and
|
# we use two bisections to find both the starting and
|
||||||
|
@ -603,48 +513,27 @@ class NilmDB():
|
||||||
row_max = row_start + remaining
|
row_max = row_start + remaining
|
||||||
if row_max < row_end:
|
if row_max < row_end:
|
||||||
row_end = row_max
|
row_end = row_max
|
||||||
restart = table[row_max]
|
restart = table[row_max][0]
|
||||||
|
|
||||||
# Add markup
|
|
||||||
if markup:
|
|
||||||
result.append(b"# interval-start " +
|
|
||||||
timestamp_to_bytes(interval.start) + b"\n")
|
|
||||||
|
|
||||||
# Gather these results up
|
# Gather these results up
|
||||||
result.append(table.get_data(row_start, row_end, binary))
|
result.extend(table[row_start:row_end])
|
||||||
|
|
||||||
# Count them
|
# Count them
|
||||||
remaining -= row_end - row_start
|
remaining -= row_end - row_start
|
||||||
|
|
||||||
# Add markup, and exit if restart is set.
|
if restart:
|
||||||
if restart is not None:
|
|
||||||
if markup:
|
|
||||||
result.append(b"# interval-end " +
|
|
||||||
timestamp_to_bytes(restart) + b"\n")
|
|
||||||
break
|
break
|
||||||
if markup:
|
|
||||||
result.append(b"# interval-end " +
|
|
||||||
timestamp_to_bytes(interval.end) + b"\n")
|
|
||||||
|
|
||||||
if count:
|
if count:
|
||||||
return matched
|
return matched
|
||||||
full_result = b"".join(result)
|
return (result, restart)
|
||||||
return (full_result, restart)
|
|
||||||
|
|
||||||
def stream_remove(self, path, start = None, end = None):
|
def stream_remove(self, path, start = None, end = None):
|
||||||
"""
|
"""
|
||||||
Remove data from the specified time interval within a stream.
|
Remove data from the specified time interval within a stream.
|
||||||
|
Removes all data in the interval [start, end), and intervals
|
||||||
Removes data in the interval [start, end), and intervals are
|
are truncated or split appropriately. Returns the number of
|
||||||
truncated or split appropriately.
|
data points removed.
|
||||||
|
|
||||||
Returns a (removed, restart) tuple.
|
|
||||||
|
|
||||||
'removed' is the number of data points that were removed.
|
|
||||||
|
|
||||||
'restart', if not None, means there were too many rows to
|
|
||||||
remove in a single request. This function should be called
|
|
||||||
again with a start time of 'restart' to complete the removal.
|
|
||||||
"""
|
"""
|
||||||
stream_id = self._stream_id(path)
|
stream_id = self._stream_id(path)
|
||||||
table = self.data.getnode(path)
|
table = self.data.getnode(path)
|
||||||
|
@ -652,34 +541,16 @@ class NilmDB():
|
||||||
(start, end) = self._check_user_times(start, end)
|
(start, end) = self._check_user_times(start, end)
|
||||||
to_remove = Interval(start, end)
|
to_remove = Interval(start, end)
|
||||||
removed = 0
|
removed = 0
|
||||||
remaining = self.max_removals
|
|
||||||
int_remaining = self.max_int_removals
|
|
||||||
restart = None
|
|
||||||
|
|
||||||
# Can't remove intervals from within the iterator, so we need to
|
# Can't remove intervals from within the iterator, so we need to
|
||||||
# remember what's currently in the intersection now.
|
# remember what's currently in the intersection now.
|
||||||
all_candidates = list(intervals.intersection(to_remove, orig = True))
|
all_candidates = list(intervals.intersection(to_remove, orig = True))
|
||||||
|
|
||||||
remove_start = None
|
|
||||||
remove_end = None
|
|
||||||
|
|
||||||
for (dbint, orig) in all_candidates:
|
for (dbint, orig) in all_candidates:
|
||||||
# Stop if we've hit the max number of interval removals
|
|
||||||
if int_remaining <= 0:
|
|
||||||
restart = dbint.start
|
|
||||||
break
|
|
||||||
|
|
||||||
# Find row start and end
|
# Find row start and end
|
||||||
row_start = self._find_start(table, dbint)
|
row_start = self._find_start(table, dbint)
|
||||||
row_end = self._find_end(table, dbint)
|
row_end = self._find_end(table, dbint)
|
||||||
|
|
||||||
# Shorten it if we'll hit the maximum number of removals
|
|
||||||
row_max = row_start + remaining
|
|
||||||
if row_max < row_end:
|
|
||||||
row_end = row_max
|
|
||||||
dbint.end = table[row_max]
|
|
||||||
restart = dbint.end
|
|
||||||
|
|
||||||
# Adjust the DBInterval to match the newly found ends
|
# Adjust the DBInterval to match the newly found ends
|
||||||
dbint.db_start = dbint.start
|
dbint.db_start = dbint.start
|
||||||
dbint.db_end = dbint.end
|
dbint.db_end = dbint.end
|
||||||
|
@ -689,29 +560,10 @@ class NilmDB():
|
||||||
# Remove interval from the database
|
# Remove interval from the database
|
||||||
self._remove_interval(stream_id, orig, dbint)
|
self._remove_interval(stream_id, orig, dbint)
|
||||||
|
|
||||||
# Remove data from the underlying table storage,
|
# Remove data from the underlying table storage
|
||||||
# coalescing adjacent removals to reduce the number of calls
|
table.remove(row_start, row_end)
|
||||||
# to table.remove.
|
|
||||||
if remove_end == row_start:
|
|
||||||
# Extend our coalesced region
|
|
||||||
remove_end = row_end
|
|
||||||
else:
|
|
||||||
# Perform previous removal, then save this one
|
|
||||||
if remove_end is not None:
|
|
||||||
table.remove(remove_start, remove_end)
|
|
||||||
remove_start = row_start
|
|
||||||
remove_end = row_end
|
|
||||||
|
|
||||||
# Count how many were removed
|
# Count how many were removed
|
||||||
removed += row_end - row_start
|
removed += row_end - row_start
|
||||||
remaining -= row_end - row_start
|
|
||||||
int_remaining -= 1
|
|
||||||
|
|
||||||
if restart is not None:
|
return removed
|
||||||
break
|
|
||||||
|
|
||||||
# Perform any final coalesced removal
|
|
||||||
if remove_end is not None:
|
|
||||||
table.remove(remove_start, remove_end)
|
|
||||||
|
|
||||||
return (removed, restart)
|
|
||||||
|
|
|
@ -1,5 +1,3 @@
|
||||||
# cython: language_level=2
|
|
||||||
|
|
||||||
cdef class RBNode:
|
cdef class RBNode:
|
||||||
cdef public object obj
|
cdef public object obj
|
||||||
cdef public double start, end
|
cdef public double start, end
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
# cython: profile=False
|
# cython: profile=False
|
||||||
# cython: cdivision=True
|
# cython: cdivision=True
|
||||||
# cython: language_level=2
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Jim Paris <jim@jtan.com>
|
Jim Paris <jim@jtan.com>
|
||||||
|
|
|
@ -1,806 +0,0 @@
|
||||||
#include <Python.h>
|
|
||||||
#include <structmember.h>
|
|
||||||
#include <endian.h>
|
|
||||||
|
|
||||||
#include <ctype.h>
|
|
||||||
#include <stdint.h>
|
|
||||||
|
|
||||||
#define __STDC_FORMAT_MACROS
|
|
||||||
#include <inttypes.h>
|
|
||||||
|
|
||||||
/* Values missing from stdint.h */
|
|
||||||
#define UINT8_MIN 0
|
|
||||||
#define UINT16_MIN 0
|
|
||||||
#define UINT32_MIN 0
|
|
||||||
#define UINT64_MIN 0
|
|
||||||
|
|
||||||
/* Marker values (if min == max, skip range check) */
|
|
||||||
#define FLOAT32_MIN 0
|
|
||||||
#define FLOAT32_MAX 0
|
|
||||||
#define FLOAT64_MIN 0
|
|
||||||
#define FLOAT64_MAX 0
|
|
||||||
|
|
||||||
typedef int64_t timestamp_t;
|
|
||||||
|
|
||||||
/* Somewhat arbitrary, just so we can use fixed sizes for strings
|
|
||||||
etc. */
|
|
||||||
static const int MAX_LAYOUT_COUNT = 1024;
|
|
||||||
|
|
||||||
/* Error object and constants */
|
|
||||||
static PyObject *ParseError;
|
|
||||||
typedef enum {
|
|
||||||
ERR_OTHER,
|
|
||||||
ERR_NON_MONOTONIC,
|
|
||||||
ERR_OUT_OF_INTERVAL,
|
|
||||||
} parseerror_code_t;
|
|
||||||
static void add_parseerror_codes(PyObject *module)
|
|
||||||
{
|
|
||||||
PyModule_AddIntMacro(module, ERR_OTHER);
|
|
||||||
PyModule_AddIntMacro(module, ERR_NON_MONOTONIC);
|
|
||||||
PyModule_AddIntMacro(module, ERR_OUT_OF_INTERVAL);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Helpers to raise ParseErrors. Use "return raise_str(...)" etc. */
|
|
||||||
static PyObject *raise_str(int line, int col, int code, const char *string)
|
|
||||||
{
|
|
||||||
PyObject *o;
|
|
||||||
o = Py_BuildValue("(iiis)", line, col, code, string);
|
|
||||||
if (o != NULL) {
|
|
||||||
PyErr_SetObject(ParseError, o);
|
|
||||||
Py_DECREF(o);
|
|
||||||
}
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
static PyObject *raise_int(int line, int col, int code, int64_t num)
|
|
||||||
{
|
|
||||||
PyObject *o;
|
|
||||||
o = Py_BuildValue("(iiiL)", line, col, code, (long long)num);
|
|
||||||
if (o != NULL) {
|
|
||||||
PyErr_SetObject(ParseError, o);
|
|
||||||
Py_DECREF(o);
|
|
||||||
}
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
/****
|
|
||||||
* Layout and type helpers
|
|
||||||
*/
|
|
||||||
typedef union {
|
|
||||||
int8_t i;
|
|
||||||
uint8_t u;
|
|
||||||
} union8_t;
|
|
||||||
typedef union {
|
|
||||||
int16_t i;
|
|
||||||
uint16_t u;
|
|
||||||
} union16_t;
|
|
||||||
typedef union {
|
|
||||||
int32_t i;
|
|
||||||
uint32_t u;
|
|
||||||
float f;
|
|
||||||
} union32_t;
|
|
||||||
typedef union {
|
|
||||||
int64_t i;
|
|
||||||
uint64_t u;
|
|
||||||
double d;
|
|
||||||
} union64_t;
|
|
||||||
|
|
||||||
typedef enum {
|
|
||||||
LAYOUT_TYPE_NONE,
|
|
||||||
LAYOUT_TYPE_INT8,
|
|
||||||
LAYOUT_TYPE_UINT8,
|
|
||||||
LAYOUT_TYPE_INT16,
|
|
||||||
LAYOUT_TYPE_UINT16,
|
|
||||||
LAYOUT_TYPE_INT32,
|
|
||||||
LAYOUT_TYPE_UINT32,
|
|
||||||
LAYOUT_TYPE_INT64,
|
|
||||||
LAYOUT_TYPE_UINT64,
|
|
||||||
LAYOUT_TYPE_FLOAT32,
|
|
||||||
LAYOUT_TYPE_FLOAT64,
|
|
||||||
} layout_type_t;
|
|
||||||
|
|
||||||
struct {
|
|
||||||
char *string;
|
|
||||||
layout_type_t layout;
|
|
||||||
int size;
|
|
||||||
} type_lookup[] = {
|
|
||||||
{ "int8", LAYOUT_TYPE_INT8, 1 },
|
|
||||||
{ "uint8", LAYOUT_TYPE_UINT8, 1 },
|
|
||||||
{ "int16", LAYOUT_TYPE_INT16, 2 },
|
|
||||||
{ "uint16", LAYOUT_TYPE_UINT16, 2 },
|
|
||||||
{ "int32", LAYOUT_TYPE_INT32, 4 },
|
|
||||||
{ "uint32", LAYOUT_TYPE_UINT32, 4 },
|
|
||||||
{ "int64", LAYOUT_TYPE_INT64, 8 },
|
|
||||||
{ "uint64", LAYOUT_TYPE_UINT64, 8 },
|
|
||||||
{ "float32", LAYOUT_TYPE_FLOAT32, 4 },
|
|
||||||
{ "float64", LAYOUT_TYPE_FLOAT64, 8 },
|
|
||||||
{ NULL }
|
|
||||||
};
|
|
||||||
|
|
||||||
/****
|
|
||||||
* Object definition, init, etc
|
|
||||||
*/
|
|
||||||
|
|
||||||
/* Rocket object */
|
|
||||||
typedef struct {
|
|
||||||
PyObject_HEAD
|
|
||||||
layout_type_t layout_type;
|
|
||||||
int layout_count;
|
|
||||||
int binary_size;
|
|
||||||
FILE *file;
|
|
||||||
int file_size;
|
|
||||||
} Rocket;
|
|
||||||
|
|
||||||
/* Dealloc / new */
|
|
||||||
static void Rocket_dealloc(Rocket *self)
|
|
||||||
{
|
|
||||||
if (self->file) {
|
|
||||||
fprintf(stderr, "rocket: file wasn't closed\n");
|
|
||||||
fclose(self->file);
|
|
||||||
self->file = NULL;
|
|
||||||
}
|
|
||||||
Py_TYPE(self)->tp_free((PyObject *)self);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Allocator: create an empty Rocket with no layout and no file.
   file_size starts at -1, meaning "not yet computed". */
static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
        Rocket *self = (Rocket *)type->tp_alloc(type, 0);
        if (self == NULL)
                return NULL;

        self->layout_type = LAYOUT_TYPE_NONE;
        self->layout_count = 0;
        self->binary_size = 0;
        self->file = NULL;
        self->file_size = -1;
        return (PyObject *)self;
}
||||||
|
|
||||||
/* .__init__(layout, file) */
|
|
||||||
static int Rocket_init(Rocket *self, PyObject *args, PyObject *kwds)
|
|
||||||
{
|
|
||||||
const char *layout, *path;
|
|
||||||
int pathlen;
|
|
||||||
static char *kwlist[] = { "layout", "file", NULL };
|
|
||||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz#", kwlist,
|
|
||||||
&layout, &path, &pathlen))
|
|
||||||
return -1;
|
|
||||||
if (!layout)
|
|
||||||
return -1;
|
|
||||||
if (path) {
|
|
||||||
if (strlen(path) != (size_t)pathlen) {
|
|
||||||
PyErr_SetString(PyExc_ValueError, "path must not "
|
|
||||||
"contain NUL characters");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
if ((self->file = fopen(path, "a+b")) == NULL) {
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
self->file_size = -1;
|
|
||||||
} else {
|
|
||||||
self->file = NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
const char *under;
|
|
||||||
char *tmp;
|
|
||||||
under = strchr(layout, '_');
|
|
||||||
if (!under) {
|
|
||||||
PyErr_SetString(PyExc_ValueError, "no such layout: "
|
|
||||||
"badly formatted string");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
self->layout_count = strtoul(under+1, &tmp, 10);
|
|
||||||
if (self->layout_count < 1 || *tmp != '\0') {
|
|
||||||
PyErr_SetString(PyExc_ValueError, "no such layout: "
|
|
||||||
"bad count");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
if (self->layout_count >= MAX_LAYOUT_COUNT) {
|
|
||||||
PyErr_SetString(PyExc_ValueError, "no such layout: "
|
|
||||||
"count too high");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
int i;
|
|
||||||
for (i = 0; type_lookup[i].string; i++)
|
|
||||||
if (strncmp(layout, type_lookup[i].string, under-layout) == 0)
|
|
||||||
break;
|
|
||||||
if (!type_lookup[i].string) {
|
|
||||||
PyErr_SetString(PyExc_ValueError, "no such layout: "
|
|
||||||
"bad data type");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
self->layout_type = type_lookup[i].layout;
|
|
||||||
self->binary_size = 8 + (type_lookup[i].size * self->layout_count);
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* .close() */
|
|
||||||
static PyObject *Rocket_close(Rocket *self)
|
|
||||||
{
|
|
||||||
if (self->file) {
|
|
||||||
fclose(self->file);
|
|
||||||
self->file = NULL;
|
|
||||||
}
|
|
||||||
Py_INCREF(Py_None);
|
|
||||||
return Py_None;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* .file_size property */
|
|
||||||
static PyObject *Rocket_get_file_size(Rocket *self)
|
|
||||||
{
|
|
||||||
if (!self->file) {
|
|
||||||
PyErr_SetString(PyExc_AttributeError, "no file");
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
if (self->file_size < 0) {
|
|
||||||
int oldpos;
|
|
||||||
if (((oldpos = ftell(self->file)) < 0) ||
|
|
||||||
(fseek(self->file, 0, SEEK_END) < 0) ||
|
|
||||||
((self->file_size = ftell(self->file)) < 0) ||
|
|
||||||
(fseek(self->file, oldpos, SEEK_SET) < 0)) {
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return PyLong_FromLong(self->file_size);
|
|
||||||
}
|
|
||||||
|
|
||||||
/****
 * Append from string
 */

/* Base-10 wrappers around strtoll/strtoull, so they can be passed to
   the CS() macro which supplies only (nptr, endptr).

   Fix: both wrappers previously returned "long int", which truncates
   the 64-bit strtoll/strtoull result on platforms where long is 32
   bits (e.g. Windows LLP64), and strtoull10 returned a *signed* type
   for an unsigned parse.  Return the parse functions' own types; all
   callers assign the result into int64_t/uint64_t union members, so
   this is backward compatible. */
static inline long long strtoll10(const char *nptr, char **endptr) {
        return strtoll(nptr, endptr, 10);
}
static inline unsigned long long strtoull10(const char *nptr, char **endptr) {
        return strtoull(nptr, endptr, 10);
}
||||||
|
|
||||||
/* .append_string(count, data, offset, linenum, start, end, last_timestamp) */
|
|
||||||
static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
|
|
||||||
{
|
|
||||||
int count;
|
|
||||||
const char *data;
|
|
||||||
int offset;
|
|
||||||
const char *linestart;
|
|
||||||
int linenum;
|
|
||||||
long long ll1, ll2, ll3;
|
|
||||||
timestamp_t start;
|
|
||||||
timestamp_t end;
|
|
||||||
timestamp_t last_timestamp;
|
|
||||||
|
|
||||||
int written = 0;
|
|
||||||
char *endptr;
|
|
||||||
union8_t t8;
|
|
||||||
union16_t t16;
|
|
||||||
union32_t t32;
|
|
||||||
union64_t t64;
|
|
||||||
int i;
|
|
||||||
|
|
||||||
/* Input data is bytes. Using 'y#' instead of 'y' might be
|
|
||||||
preferable, but strto* requires the null terminator. */
|
|
||||||
if (!PyArg_ParseTuple(args, "iyiiLLL:append_string", &count,
|
|
||||||
&data, &offset, &linenum,
|
|
||||||
&ll1, &ll2, &ll3))
|
|
||||||
return NULL;
|
|
||||||
start = ll1;
|
|
||||||
end = ll2;
|
|
||||||
last_timestamp = ll3;
|
|
||||||
|
|
||||||
/* Skip spaces, but don't skip over a newline. */
|
|
||||||
#define SKIP_BLANK(buf) do { \
|
|
||||||
while (isspace(*buf)) { \
|
|
||||||
if (*buf == '\n') \
|
|
||||||
break; \
|
|
||||||
buf++; \
|
|
||||||
} } while(0)
|
|
||||||
|
|
||||||
const char *buf = &data[offset];
|
|
||||||
while (written < count && *buf)
|
|
||||||
{
|
|
||||||
linestart = buf;
|
|
||||||
linenum++;
|
|
||||||
|
|
||||||
/* Skip leading whitespace and commented lines */
|
|
||||||
SKIP_BLANK(buf);
|
|
||||||
if (*buf == '#') {
|
|
||||||
while (*buf && *buf != '\n')
|
|
||||||
buf++;
|
|
||||||
if (*buf)
|
|
||||||
buf++;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Extract timestamp */
|
|
||||||
t64.i = strtoll(buf, &endptr, 10);
|
|
||||||
if (endptr == buf || !isspace(*endptr)) {
|
|
||||||
/* Try parsing as a double instead */
|
|
||||||
t64.d = strtod(buf, &endptr);
|
|
||||||
if (endptr == buf)
|
|
||||||
goto bad_timestamp;
|
|
||||||
if (!isspace(*endptr))
|
|
||||||
goto cant_parse_value;
|
|
||||||
t64.i = round(t64.d);
|
|
||||||
}
|
|
||||||
if (t64.i <= last_timestamp)
|
|
||||||
return raise_int(linenum, buf - linestart + 1,
|
|
||||||
ERR_NON_MONOTONIC, t64.i);
|
|
||||||
last_timestamp = t64.i;
|
|
||||||
if (t64.i < start || t64.i >= end)
|
|
||||||
return raise_int(linenum, buf - linestart + 1,
|
|
||||||
ERR_OUT_OF_INTERVAL, t64.i);
|
|
||||||
t64.u = le64toh(t64.u);
|
|
||||||
if (fwrite(&t64.u, 8, 1, self->file) != 1)
|
|
||||||
goto err;
|
|
||||||
buf = endptr;
|
|
||||||
|
|
||||||
/* Parse all values in the line */
|
|
||||||
switch (self->layout_type) {
|
|
||||||
#define CS(type, parsefunc, parsetype, realtype, disktype, letoh, bytes) \
|
|
||||||
case LAYOUT_TYPE_##type: \
|
|
||||||
/* parse and write in a loop */ \
|
|
||||||
for (i = 0; i < self->layout_count; i++) { \
|
|
||||||
/* skip non-newlines */ \
|
|
||||||
SKIP_BLANK(buf); \
|
|
||||||
if (*buf == '\n') \
|
|
||||||
goto wrong_number_of_values; \
|
|
||||||
/* parse number */ \
|
|
||||||
parsetype = parsefunc(buf, &endptr); \
|
|
||||||
if (*endptr && !isspace(*endptr)) \
|
|
||||||
goto cant_parse_value; \
|
|
||||||
/* check limits */ \
|
|
||||||
if (type##_MIN != type##_MAX && \
|
|
||||||
(parsetype < type##_MIN || \
|
|
||||||
parsetype > type##_MAX)) \
|
|
||||||
goto value_out_of_range; \
|
|
||||||
/* convert to disk representation */ \
|
|
||||||
realtype = parsetype; \
|
|
||||||
disktype = letoh(disktype); \
|
|
||||||
/* write it */ \
|
|
||||||
if (fwrite(&disktype, bytes, \
|
|
||||||
1, self->file) != 1) \
|
|
||||||
goto err; \
|
|
||||||
/* advance buf */ \
|
|
||||||
buf = endptr; \
|
|
||||||
} \
|
|
||||||
/* Skip trailing whitespace and comments */ \
|
|
||||||
SKIP_BLANK(buf); \
|
|
||||||
if (*buf == '#') \
|
|
||||||
while (*buf && *buf != '\n') \
|
|
||||||
buf++; \
|
|
||||||
if (*buf == '\n') \
|
|
||||||
buf++; \
|
|
||||||
else if (*buf != '\0') \
|
|
||||||
goto extra_data_on_line; \
|
|
||||||
break
|
|
||||||
|
|
||||||
CS(INT8, strtoll10, t64.i, t8.i, t8.u, , 1);
|
|
||||||
CS(UINT8, strtoull10, t64.u, t8.u, t8.u, , 1);
|
|
||||||
CS(INT16, strtoll10, t64.i, t16.i, t16.u, le16toh, 2);
|
|
||||||
CS(UINT16, strtoull10, t64.u, t16.u, t16.u, le16toh, 2);
|
|
||||||
CS(INT32, strtoll10, t64.i, t32.i, t32.u, le32toh, 4);
|
|
||||||
CS(UINT32, strtoull10, t64.u, t32.u, t32.u, le32toh, 4);
|
|
||||||
CS(INT64, strtoll10, t64.i, t64.i, t64.u, le64toh, 8);
|
|
||||||
CS(UINT64, strtoull10, t64.u, t64.u, t64.u, le64toh, 8);
|
|
||||||
CS(FLOAT32, strtod, t64.d, t32.f, t32.u, le32toh, 4);
|
|
||||||
CS(FLOAT64, strtod, t64.d, t64.d, t64.u, le64toh, 8);
|
|
||||||
#undef CS
|
|
||||||
default:
|
|
||||||
PyErr_SetString(PyExc_TypeError, "unknown type");
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Done this line */
|
|
||||||
written++;
|
|
||||||
}
|
|
||||||
|
|
||||||
fflush(self->file);
|
|
||||||
|
|
||||||
/* Build return value and return */
|
|
||||||
offset = buf - data;
|
|
||||||
PyObject *o;
|
|
||||||
o = Py_BuildValue("(iiLi)", written, offset,
|
|
||||||
(long long)last_timestamp, linenum);
|
|
||||||
return o;
|
|
||||||
err:
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
bad_timestamp:
|
|
||||||
return raise_str(linenum, buf - linestart + 1,
|
|
||||||
ERR_OTHER, "bad timestamp");
|
|
||||||
cant_parse_value:
|
|
||||||
return raise_str(linenum, buf - linestart + 1,
|
|
||||||
ERR_OTHER, "can't parse value");
|
|
||||||
wrong_number_of_values:
|
|
||||||
return raise_str(linenum, buf - linestart + 1,
|
|
||||||
ERR_OTHER, "wrong number of values");
|
|
||||||
value_out_of_range:
|
|
||||||
return raise_str(linenum, buf - linestart + 1,
|
|
||||||
ERR_OTHER, "value out of range");
|
|
||||||
extra_data_on_line:
|
|
||||||
return raise_str(linenum, buf - linestart + 1,
|
|
||||||
ERR_OTHER, "extra data on line");
|
|
||||||
}
|
|
||||||
|
|
||||||
/****
|
|
||||||
* Append from binary data
|
|
||||||
*/
|
|
||||||
|
|
||||||
/* .append_binary(count, data, offset, linenum, start, end, last_timestamp) */
|
|
||||||
static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
|
|
||||||
{
|
|
||||||
int count;
|
|
||||||
const uint8_t *data;
|
|
||||||
int data_len;
|
|
||||||
int linenum;
|
|
||||||
int offset;
|
|
||||||
long long ll1, ll2, ll3;
|
|
||||||
timestamp_t start;
|
|
||||||
timestamp_t end;
|
|
||||||
timestamp_t last_timestamp;
|
|
||||||
|
|
||||||
if (!PyArg_ParseTuple(args, "iy#iiLLL:append_binary",
|
|
||||||
&count, &data, &data_len, &offset,
|
|
||||||
&linenum, &ll1, &ll2, &ll3))
|
|
||||||
return NULL;
|
|
||||||
start = ll1;
|
|
||||||
end = ll2;
|
|
||||||
last_timestamp = ll3;
|
|
||||||
|
|
||||||
/* Advance to offset */
|
|
||||||
if (offset > data_len)
|
|
||||||
return raise_str(0, 0, ERR_OTHER, "bad offset");
|
|
||||||
data += offset;
|
|
||||||
data_len -= offset;
|
|
||||||
|
|
||||||
/* Figure out max number of rows to insert */
|
|
||||||
int rows = data_len / self->binary_size;
|
|
||||||
if (rows > count)
|
|
||||||
rows = count;
|
|
||||||
|
|
||||||
/* Check timestamps */
|
|
||||||
timestamp_t ts;
|
|
||||||
int i;
|
|
||||||
for (i = 0; i < rows; i++) {
|
|
||||||
/* Read raw timestamp, byteswap if needed */
|
|
||||||
memcpy(&ts, &data[i * self->binary_size], 8);
|
|
||||||
ts = le64toh(ts);
|
|
||||||
|
|
||||||
/* Check limits */
|
|
||||||
if (ts <= last_timestamp)
|
|
||||||
return raise_int(i, 0, ERR_NON_MONOTONIC, ts);
|
|
||||||
last_timestamp = ts;
|
|
||||||
if (ts < start || ts >= end)
|
|
||||||
return raise_int(i, 0, ERR_OUT_OF_INTERVAL, ts);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Write binary data */
|
|
||||||
if (fwrite(data, self->binary_size, rows, self->file) != (size_t)rows) {
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
fflush(self->file);
|
|
||||||
|
|
||||||
/* Build return value and return */
|
|
||||||
PyObject *o;
|
|
||||||
o = Py_BuildValue("(iiLi)", rows, offset + rows * self->binary_size,
|
|
||||||
(long long)last_timestamp, linenum);
|
|
||||||
return o;
|
|
||||||
}
|
|
||||||
|
|
||||||
/****
|
|
||||||
* Extract to binary bytes object containing ASCII text-formatted data
|
|
||||||
*/
|
|
||||||
|
|
||||||
static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
|
|
||||||
{
|
|
||||||
long count;
|
|
||||||
long offset;
|
|
||||||
|
|
||||||
if (!PyArg_ParseTuple(args, "ll", &offset, &count))
|
|
||||||
return NULL;
|
|
||||||
if (!self->file) {
|
|
||||||
PyErr_SetString(PyExc_Exception, "no file");
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
/* Seek to target location */
|
|
||||||
if (fseek(self->file, offset, SEEK_SET) < 0) {
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
char *str = NULL, *new;
|
|
||||||
long len_alloc = 0;
|
|
||||||
long len = 0;
|
|
||||||
int ret;
|
|
||||||
|
|
||||||
/* min space free in string (and the maximum length of one
|
|
||||||
line); this is generous */
|
|
||||||
const int min_free = 32 * MAX_LAYOUT_COUNT;
|
|
||||||
|
|
||||||
/* how much to allocate at once */
|
|
||||||
const int alloc_size = 1048576;
|
|
||||||
|
|
||||||
int row, i;
|
|
||||||
union8_t t8;
|
|
||||||
union16_t t16;
|
|
||||||
union32_t t32;
|
|
||||||
union64_t t64;
|
|
||||||
for (row = 0; row < count; row++) {
|
|
||||||
/* Make sure there's space for a line */
|
|
||||||
if ((len_alloc - len) < min_free) {
|
|
||||||
/* grow by 1 meg at a time */
|
|
||||||
len_alloc += alloc_size;
|
|
||||||
new = realloc(str, len_alloc);
|
|
||||||
if (new == NULL)
|
|
||||||
goto err;
|
|
||||||
str = new;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Read and print timestamp */
|
|
||||||
if (fread(&t64.u, 8, 1, self->file) != 1)
|
|
||||||
goto err;
|
|
||||||
t64.u = le64toh(t64.u);
|
|
||||||
ret = sprintf(&str[len], "%" PRId64, t64.i);
|
|
||||||
if (ret <= 0)
|
|
||||||
goto err;
|
|
||||||
len += ret;
|
|
||||||
|
|
||||||
/* Read and print values */
|
|
||||||
switch (self->layout_type) {
|
|
||||||
#define CASE(type, fmt, fmttype, disktype, letoh, bytes) \
|
|
||||||
case LAYOUT_TYPE_##type: \
|
|
||||||
/* read and format in a loop */ \
|
|
||||||
for (i = 0; i < self->layout_count; i++) { \
|
|
||||||
if (fread(&disktype, bytes, \
|
|
||||||
1, self->file) != 1) \
|
|
||||||
goto err; \
|
|
||||||
disktype = letoh(disktype); \
|
|
||||||
ret = sprintf(&str[len], " " fmt, \
|
|
||||||
fmttype); \
|
|
||||||
if (ret <= 0) \
|
|
||||||
goto err; \
|
|
||||||
len += ret; \
|
|
||||||
} \
|
|
||||||
break
|
|
||||||
CASE(INT8, "%" PRId8, t8.i, t8.u, , 1);
|
|
||||||
CASE(UINT8, "%" PRIu8, t8.u, t8.u, , 1);
|
|
||||||
CASE(INT16, "%" PRId16, t16.i, t16.u, le16toh, 2);
|
|
||||||
CASE(UINT16, "%" PRIu16, t16.u, t16.u, le16toh, 2);
|
|
||||||
CASE(INT32, "%" PRId32, t32.i, t32.u, le32toh, 4);
|
|
||||||
CASE(UINT32, "%" PRIu32, t32.u, t32.u, le32toh, 4);
|
|
||||||
CASE(INT64, "%" PRId64, t64.i, t64.u, le64toh, 8);
|
|
||||||
CASE(UINT64, "%" PRIu64, t64.u, t64.u, le64toh, 8);
|
|
||||||
/* These next two are a bit debatable. floats
|
|
||||||
are 6-9 significant figures, so we print 7.
|
|
||||||
Doubles are 15-19, so we print 17. This is
|
|
||||||
similar to the old prep format for float32.
|
|
||||||
*/
|
|
||||||
CASE(FLOAT32, "%.6e", t32.f, t32.u, le32toh, 4);
|
|
||||||
CASE(FLOAT64, "%.16e", t64.d, t64.u, le64toh, 8);
|
|
||||||
#undef CASE
|
|
||||||
default:
|
|
||||||
PyErr_SetString(PyExc_TypeError, "unknown type");
|
|
||||||
if (str) free(str);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
str[len++] = '\n';
|
|
||||||
}
|
|
||||||
|
|
||||||
PyObject *pystr = PyBytes_FromStringAndSize(str, len);
|
|
||||||
free(str);
|
|
||||||
return pystr;
|
|
||||||
err:
|
|
||||||
if (str) free(str);
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
/****
|
|
||||||
* Extract to binary bytes object containing raw little-endian binary data
|
|
||||||
*/
|
|
||||||
static PyObject *Rocket_extract_binary(Rocket *self, PyObject *args)
|
|
||||||
{
|
|
||||||
long count;
|
|
||||||
long offset;
|
|
||||||
|
|
||||||
if (!PyArg_ParseTuple(args, "ll", &offset, &count))
|
|
||||||
return NULL;
|
|
||||||
if (!self->file) {
|
|
||||||
PyErr_SetString(PyExc_Exception, "no file");
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
/* Seek to target location */
|
|
||||||
if (fseek(self->file, offset, SEEK_SET) < 0) {
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
uint8_t *str;
|
|
||||||
int len = count * self->binary_size;
|
|
||||||
str = malloc(len);
|
|
||||||
if (str == NULL) {
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Data in the file is already in the desired little-endian
|
|
||||||
binary format, so just read it directly. */
|
|
||||||
if (fread(str, self->binary_size, count, self->file) != (size_t)count) {
|
|
||||||
free(str);
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
PyObject *pystr = PyBytes_FromStringAndSize((char *)str, len);
|
|
||||||
free(str);
|
|
||||||
return pystr;
|
|
||||||
}
|
|
||||||
|
|
||||||
/****
|
|
||||||
* Extract timestamp
|
|
||||||
*/
|
|
||||||
static PyObject *Rocket_extract_timestamp(Rocket *self, PyObject *args)
|
|
||||||
{
|
|
||||||
long offset;
|
|
||||||
union64_t t64;
|
|
||||||
if (!PyArg_ParseTuple(args, "l", &offset))
|
|
||||||
return NULL;
|
|
||||||
if (!self->file) {
|
|
||||||
PyErr_SetString(PyExc_Exception, "no file");
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Seek to target location and read timestamp */
|
|
||||||
if ((fseek(self->file, offset, SEEK_SET) < 0) ||
|
|
||||||
(fread(&t64.u, 8, 1, self->file) != 1)) {
|
|
||||||
PyErr_SetFromErrno(PyExc_OSError);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Convert and return */
|
|
||||||
t64.u = le64toh(t64.u);
|
|
||||||
return Py_BuildValue("L", (long long)t64.i);
|
|
||||||
}
|
|
||||||
|
|
||||||
/****
|
|
||||||
* Module and type setup
|
|
||||||
*/
|
|
||||||
|
|
||||||
static PyGetSetDef Rocket_getsetters[] = {
|
|
||||||
{ "file_size", (getter)Rocket_get_file_size, NULL,
|
|
||||||
"file size in bytes", NULL },
|
|
||||||
{ NULL },
|
|
||||||
};
|
|
||||||
|
|
||||||
static PyMemberDef Rocket_members[] = {
|
|
||||||
{ "binary_size", T_INT, offsetof(Rocket, binary_size), 0,
|
|
||||||
"binary size per row" },
|
|
||||||
{ NULL },
|
|
||||||
};
|
|
||||||
|
|
||||||
static PyMethodDef Rocket_methods[] = {
|
|
||||||
{ "close",
|
|
||||||
(PyCFunction)Rocket_close, METH_NOARGS,
|
|
||||||
"close(self)\n\n"
|
|
||||||
"Close file handle" },
|
|
||||||
|
|
||||||
{ "append_string",
|
|
||||||
(PyCFunction)Rocket_append_string, METH_VARARGS,
|
|
||||||
"append_string(self, count, data, offset, line, start, end, ts)\n\n"
|
|
||||||
"Parse string and append data.\n"
|
|
||||||
"\n"
|
|
||||||
" count: maximum number of rows to add\n"
|
|
||||||
" data: string data\n"
|
|
||||||
" offset: byte offset into data to start parsing\n"
|
|
||||||
" line: current line number of data\n"
|
|
||||||
" start: starting timestamp for interval\n"
|
|
||||||
" end: end timestamp for interval\n"
|
|
||||||
" ts: last timestamp that was previously parsed\n"
|
|
||||||
"\n"
|
|
||||||
"Raises ParseError if timestamps are non-monotonic, outside\n"
|
|
||||||
"the start/end interval etc.\n"
|
|
||||||
"\n"
|
|
||||||
"On success, return a tuple:\n"
|
|
||||||
" added_rows: how many rows were added from the file\n"
|
|
||||||
" data_offset: current offset into the data string\n"
|
|
||||||
" last_timestamp: last timestamp we parsed\n"
|
|
||||||
" linenum: current line number" },
|
|
||||||
|
|
||||||
{ "append_binary",
|
|
||||||
(PyCFunction)Rocket_append_binary, METH_VARARGS,
|
|
||||||
"append_binary(self, count, data, offset, line, start, end, ts)\n\n"
|
|
||||||
"Append binary data, which must match the data layout.\n"
|
|
||||||
"\n"
|
|
||||||
" count: maximum number of rows to add\n"
|
|
||||||
" data: binary data\n"
|
|
||||||
" offset: byte offset into data to start adding\n"
|
|
||||||
" line: current line number (unused)\n"
|
|
||||||
" start: starting timestamp for interval\n"
|
|
||||||
" end: end timestamp for interval\n"
|
|
||||||
" ts: last timestamp that was previously parsed\n"
|
|
||||||
"\n"
|
|
||||||
"Raises ParseError if timestamps are non-monotonic, outside\n"
|
|
||||||
"the start/end interval etc.\n"
|
|
||||||
"\n"
|
|
||||||
"On success, return a tuple:\n"
|
|
||||||
" added_rows: how many rows were added from the file\n"
|
|
||||||
" data_offset: current offset into the data string\n"
|
|
||||||
" last_timestamp: last timestamp we parsed\n"
|
|
||||||
" linenum: current line number (copied from argument)" },
|
|
||||||
|
|
||||||
{ "extract_string",
|
|
||||||
(PyCFunction)Rocket_extract_string, METH_VARARGS,
|
|
||||||
"extract_string(self, offset, count)\n\n"
|
|
||||||
"Extract count rows of data from the file at offset offset.\n"
|
|
||||||
"Return an ascii formatted string according to the layout" },
|
|
||||||
|
|
||||||
{ "extract_binary",
|
|
||||||
(PyCFunction)Rocket_extract_binary, METH_VARARGS,
|
|
||||||
"extract_binary(self, offset, count)\n\n"
|
|
||||||
"Extract count rows of data from the file at offset offset.\n"
|
|
||||||
"Return a raw binary string of data matching the data layout." },
|
|
||||||
|
|
||||||
{ "extract_timestamp",
|
|
||||||
(PyCFunction)Rocket_extract_timestamp, METH_VARARGS,
|
|
||||||
"extract_timestamp(self, offset)\n\n"
|
|
||||||
"Extract a single timestamp from the file" },
|
|
||||||
|
|
||||||
{ NULL },
|
|
||||||
};
|
|
||||||
|
|
||||||
static PyTypeObject RocketType = {
|
|
||||||
PyVarObject_HEAD_INIT(NULL, 0)
|
|
||||||
|
|
||||||
.tp_name = "rocket.Rocket",
|
|
||||||
.tp_basicsize = sizeof(Rocket),
|
|
||||||
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
|
|
||||||
|
|
||||||
.tp_new = Rocket_new,
|
|
||||||
.tp_dealloc = (destructor)Rocket_dealloc,
|
|
||||||
.tp_init = (initproc)Rocket_init,
|
|
||||||
.tp_methods = Rocket_methods,
|
|
||||||
.tp_members = Rocket_members,
|
|
||||||
.tp_getset = Rocket_getsetters,
|
|
||||||
|
|
||||||
.tp_doc = ("rocket.Rocket(layout, file)\n\n"
|
|
||||||
"C implementation of the \"rocket\" data parsing\n"
|
|
||||||
"interface, which translates between the binary\n"
|
|
||||||
"format on disk and the ASCII or Python list\n"
|
|
||||||
"format used when communicating with the rest of\n"
|
|
||||||
"the system.")
|
|
||||||
};
|
|
||||||
|
|
||||||
static PyMethodDef module_methods[] = {
|
|
||||||
{ NULL },
|
|
||||||
};
|
|
||||||
|
|
||||||
static struct PyModuleDef moduledef = {
|
|
||||||
PyModuleDef_HEAD_INIT,
|
|
||||||
.m_name = "rocker",
|
|
||||||
.m_doc = "Rocket data parsing and formatting module",
|
|
||||||
.m_size = -1,
|
|
||||||
.m_methods = module_methods,
|
|
||||||
};
|
|
||||||
|
|
||||||
PyMODINIT_FUNC PyInit_rocket(void)
|
|
||||||
{
|
|
||||||
PyObject *module;
|
|
||||||
|
|
||||||
RocketType.tp_new = PyType_GenericNew;
|
|
||||||
if (PyType_Ready(&RocketType) < 0)
|
|
||||||
return NULL;
|
|
||||||
|
|
||||||
module = PyModule_Create(&moduledef);
|
|
||||||
Py_INCREF(&RocketType);
|
|
||||||
PyModule_AddObject(module, "Rocket", (PyObject *)&RocketType);
|
|
||||||
|
|
||||||
ParseError = PyErr_NewException("rocket.ParseError", NULL, NULL);
|
|
||||||
Py_INCREF(ParseError);
|
|
||||||
PyModule_AddObject(module, "ParseError", ParseError);
|
|
||||||
add_parseerror_codes(module);
|
|
||||||
|
|
||||||
return module;
|
|
||||||
}
|
|
|
@ -1,49 +1,133 @@
|
||||||
"""CherryPy-based server for accessing NILM database via HTTP"""
|
"""CherryPy-based server for accessing NILM database via HTTP"""
|
||||||
|
|
||||||
import os
|
# Need absolute_import so that "import nilmdb" won't pull in
|
||||||
import json
|
# nilmdb.py, but will pull the nilmdb module instead.
|
||||||
import socket
|
from __future__ import absolute_import
|
||||||
import traceback
|
import nilmdb
|
||||||
|
from nilmdb.utils.printf import *
|
||||||
import psutil
|
|
||||||
import cherrypy
|
|
||||||
|
|
||||||
import nilmdb.server
|
|
||||||
from nilmdb.utils.printf import sprintf
|
|
||||||
from nilmdb.server.errors import NilmDBError
|
from nilmdb.server.errors import NilmDBError
|
||||||
from nilmdb.utils.time import string_to_timestamp
|
|
||||||
|
|
||||||
from nilmdb.server.serverutil import (
|
import cherrypy
|
||||||
chunked_response,
|
import sys
|
||||||
response_type,
|
import os
|
||||||
exception_to_httperror,
|
import simplejson as json
|
||||||
CORS_allow,
|
import decorator
|
||||||
json_to_request_params,
|
import traceback
|
||||||
json_error_page,
|
import psutil
|
||||||
cherrypy_start,
|
|
||||||
cherrypy_stop,
|
|
||||||
bool_param,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add CORS_allow tool
|
class NilmApp(object):
|
||||||
cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
|
|
||||||
|
|
||||||
|
|
||||||
class NilmApp():
|
|
||||||
def __init__(self, db):
|
def __init__(self, db):
|
||||||
self.db = db
|
self.db = db
|
||||||
|
|
||||||
|
# Decorators
|
||||||
|
def chunked_response(func):
|
||||||
|
"""Decorator to enable chunked responses."""
|
||||||
|
# Set this to False to get better tracebacks from some requests
|
||||||
|
# (/stream/extract, /stream/intervals).
|
||||||
|
func._cp_config = { 'response.stream': True }
|
||||||
|
return func
|
||||||
|
|
||||||
|
def response_type(content_type):
|
||||||
|
"""Return a decorator-generating function that sets the
|
||||||
|
response type to the specified string."""
|
||||||
|
def wrapper(func, *args, **kwargs):
|
||||||
|
cherrypy.response.headers['Content-Type'] = content_type
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
return decorator.decorator(wrapper)
|
||||||
|
|
||||||
|
@decorator.decorator
|
||||||
|
def workaround_cp_bug_1200(func, *args, **kwargs): # pragma: no cover
|
||||||
|
"""Decorator to work around CherryPy bug #1200 in a response
|
||||||
|
generator.
|
||||||
|
|
||||||
|
Even if chunked responses are disabled, LookupError or
|
||||||
|
UnicodeError exceptions may still be swallowed by CherryPy due to
|
||||||
|
bug #1200. This throws them as generic Exceptions instead so that
|
||||||
|
they make it through.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
for val in func(*args, **kwargs):
|
||||||
|
yield val
|
||||||
|
except (LookupError, UnicodeError):
|
||||||
|
raise Exception("bug workaround; real exception is:\n" +
|
||||||
|
traceback.format_exc())
|
||||||
|
|
||||||
|
def exception_to_httperror(*expected):
|
||||||
|
"""Return a decorator-generating function that catches expected
|
||||||
|
errors and throws a HTTPError describing it instead.
|
||||||
|
|
||||||
|
@exception_to_httperror(NilmDBError, ValueError)
|
||||||
|
def foo():
|
||||||
|
pass
|
||||||
|
"""
|
||||||
|
def wrapper(func, *args, **kwargs):
|
||||||
|
try:
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
except expected as e:
|
||||||
|
message = sprintf("%s", str(e))
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request", message)
|
||||||
|
# We need to preserve the function's argspecs for CherryPy to
|
||||||
|
# handle argument errors correctly. Decorator.decorator takes
|
||||||
|
# care of that.
|
||||||
|
return decorator.decorator(wrapper)
|
||||||
|
|
||||||
|
# Custom CherryPy tools
|
||||||
|
|
||||||
|
def CORS_allow(methods):
|
||||||
|
"""This does several things:
|
||||||
|
|
||||||
|
Handles CORS preflight requests.
|
||||||
|
Adds Allow: header to all requests.
|
||||||
|
Raise 405 if request.method not in method.
|
||||||
|
|
||||||
|
It is similar to cherrypy.tools.allow, with the CORS stuff added.
|
||||||
|
"""
|
||||||
|
request = cherrypy.request.headers
|
||||||
|
response = cherrypy.response.headers
|
||||||
|
|
||||||
|
if not isinstance(methods, (tuple, list)): # pragma: no cover
|
||||||
|
methods = [ methods ]
|
||||||
|
methods = [ m.upper() for m in methods if m ]
|
||||||
|
if not methods: # pragma: no cover
|
||||||
|
methods = [ 'GET', 'HEAD' ]
|
||||||
|
elif 'GET' in methods and 'HEAD' not in methods: # pragma: no cover
|
||||||
|
methods.append('HEAD')
|
||||||
|
response['Allow'] = ', '.join(methods)
|
||||||
|
|
||||||
|
# Allow all origins
|
||||||
|
if 'Origin' in request:
|
||||||
|
response['Access-Control-Allow-Origin'] = request['Origin']
|
||||||
|
|
||||||
|
# If it's a CORS request, send response.
|
||||||
|
request_method = request.get("Access-Control-Request-Method", None)
|
||||||
|
request_headers = request.get("Access-Control-Request-Headers", None)
|
||||||
|
if (cherrypy.request.method == "OPTIONS" and
|
||||||
|
request_method and request_headers):
|
||||||
|
response['Access-Control-Allow-Headers'] = request_headers
|
||||||
|
response['Access-Control-Allow-Methods'] = ', '.join(methods)
|
||||||
|
# Try to stop further processing and return a 200 OK
|
||||||
|
cherrypy.response.status = "200 OK"
|
||||||
|
cherrypy.response.body = ""
|
||||||
|
cherrypy.request.handler = lambda: ""
|
||||||
|
return
|
||||||
|
|
||||||
|
# Reject methods that were not explicitly allowed
|
||||||
|
if cherrypy.request.method not in methods:
|
||||||
|
raise cherrypy.HTTPError(405)
|
||||||
|
|
||||||
|
cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
|
||||||
|
|
||||||
# CherryPy apps
|
# CherryPy apps
|
||||||
class Root(NilmApp):
|
class Root(NilmApp):
|
||||||
"""Root application for NILM database"""
|
"""Root application for NILM database"""
|
||||||
|
|
||||||
|
def __init__(self, db):
|
||||||
|
super(Root, self).__init__(db)
|
||||||
|
|
||||||
# /
|
# /
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
def index(self):
|
def index(self):
|
||||||
cherrypy.response.headers['Content-Type'] = 'text/plain'
|
raise cherrypy.NotFound()
|
||||||
msg = sprintf("This is NilmDB version %s, running on host %s.\n",
|
|
||||||
nilmdb.__version__, socket.getfqdn())
|
|
||||||
return msg
|
|
||||||
|
|
||||||
# /favicon.ico
|
# /favicon.ico
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
|
@ -63,66 +147,34 @@ class Root(NilmApp):
|
||||||
"""Return a dictionary with the database path,
|
"""Return a dictionary with the database path,
|
||||||
size of the database in bytes, and free disk space in bytes"""
|
size of the database in bytes, and free disk space in bytes"""
|
||||||
path = self.db.get_basepath()
|
path = self.db.get_basepath()
|
||||||
usage = psutil.disk_usage(path)
|
return { "path": path,
|
||||||
dbsize = nilmdb.utils.du(path)
|
"size": nilmdb.utils.du(path),
|
||||||
return {
|
"free": psutil.disk_usage(path).free }
|
||||||
"path": path,
|
|
||||||
"size": dbsize,
|
|
||||||
"other": max(usage.used - dbsize, 0),
|
|
||||||
"reserved": max(usage.total - usage.used - usage.free, 0),
|
|
||||||
"free": usage.free
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(NilmApp):
|
class Stream(NilmApp):
|
||||||
"""Stream-specific operations"""
|
"""Stream-specific operations"""
|
||||||
|
|
||||||
# Helpers
|
|
||||||
def _get_times(self, start_param, end_param):
|
|
||||||
(start, end) = (None, None)
|
|
||||||
try:
|
|
||||||
if start_param is not None:
|
|
||||||
start = string_to_timestamp(start_param)
|
|
||||||
except Exception:
|
|
||||||
raise cherrypy.HTTPError("400 Bad Request", sprintf(
|
|
||||||
"invalid start (%s): must be a numeric timestamp",
|
|
||||||
start_param))
|
|
||||||
try:
|
|
||||||
if end_param is not None:
|
|
||||||
end = string_to_timestamp(end_param)
|
|
||||||
except Exception:
|
|
||||||
raise cherrypy.HTTPError("400 Bad Request", sprintf(
|
|
||||||
"invalid end (%s): must be a numeric timestamp", end_param))
|
|
||||||
if start is not None and end is not None:
|
|
||||||
if start >= end:
|
|
||||||
raise cherrypy.HTTPError(
|
|
||||||
"400 Bad Request",
|
|
||||||
sprintf("start must precede end (%s >= %s)",
|
|
||||||
start_param, end_param))
|
|
||||||
return (start, end)
|
|
||||||
|
|
||||||
# /stream/list
|
# /stream/list
|
||||||
# /stream/list?layout=float32_8
|
# /stream/list?layout=PrepData
|
||||||
# /stream/list?path=/newton/prep&extended=1
|
# /stream/list?path=/newton/prep&extent=1
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
def list(self, path=None, layout=None, extended=None):
|
def list(self, path = None, layout = None, extent = None):
|
||||||
"""List all streams in the database. With optional path or
|
"""List all streams in the database. With optional path or
|
||||||
layout parameter, just list streams that match the given path
|
layout parameter, just list streams that match the given path
|
||||||
or layout.
|
or layout.
|
||||||
|
|
||||||
If extended is missing or zero, returns a list of lists
|
If extent is not given, returns a list of lists containing
|
||||||
containing the path and layout: [ path, layout ]
|
the path and layout: [ path, layout ]
|
||||||
|
|
||||||
If extended is true, returns a list of lists containing
|
If extent is provided, returns a list of lists containing the
|
||||||
extended info: [ path, layout, extent_min, extent_max,
|
path, layout, and min/max extent of the data:
|
||||||
total_rows, total_seconds ]. More data may be added.
|
[ path, layout, extent_min, extent_max ]
|
||||||
"""
|
"""
|
||||||
return self.db.stream_list(path, layout, bool(extended))
|
return self.db.stream_list(path, layout, bool(extent))
|
||||||
|
|
||||||
# /stream/create?path=/newton/prep&layout=float32_8
|
# /stream/create?path=/newton/prep&layout=PrepData
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_in()
|
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
@exception_to_httperror(NilmDBError, ValueError)
|
@exception_to_httperror(NilmDBError, ValueError)
|
||||||
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
||||||
|
@ -134,24 +186,13 @@ class Stream(NilmApp):
|
||||||
|
|
||||||
# /stream/destroy?path=/newton/prep
|
# /stream/destroy?path=/newton/prep
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_in()
|
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
@exception_to_httperror(NilmDBError)
|
@exception_to_httperror(NilmDBError)
|
||||||
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
||||||
def destroy(self, path):
|
def destroy(self, path):
|
||||||
"""Delete a stream. Fails if any data is still present."""
|
"""Delete a stream and its associated data."""
|
||||||
return self.db.stream_destroy(path)
|
return self.db.stream_destroy(path)
|
||||||
|
|
||||||
# /stream/rename?oldpath=/newton/prep&newpath=/newton/prep/1
|
|
||||||
@cherrypy.expose
|
|
||||||
@cherrypy.tools.json_in()
|
|
||||||
@cherrypy.tools.json_out()
|
|
||||||
@exception_to_httperror(NilmDBError, ValueError)
|
|
||||||
@cherrypy.tools.CORS_allow(methods=["POST"])
|
|
||||||
def rename(self, oldpath, newpath):
|
|
||||||
"""Rename a stream."""
|
|
||||||
return self.db.stream_rename(oldpath, newpath)
|
|
||||||
|
|
||||||
# /stream/get_metadata?path=/newton/prep
|
# /stream/get_metadata?path=/newton/prep
|
||||||
# /stream/get_metadata?path=/newton/prep&key=foo&key=bar
|
# /stream/get_metadata?path=/newton/prep&key=foo&key=bar
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
|
@ -163,9 +204,9 @@ class Stream(NilmApp):
|
||||||
try:
|
try:
|
||||||
data = self.db.stream_get_metadata(path)
|
data = self.db.stream_get_metadata(path)
|
||||||
except nilmdb.server.nilmdb.StreamError as e:
|
except nilmdb.server.nilmdb.StreamError as e:
|
||||||
raise cherrypy.HTTPError("404 Not Found", str(e))
|
raise cherrypy.HTTPError("404 Not Found", e.message)
|
||||||
if key is None: # If no keys specified, return them all
|
if key is None: # If no keys specified, return them all
|
||||||
key = list(data.keys())
|
key = data.keys()
|
||||||
elif not isinstance(key, list):
|
elif not isinstance(key, list):
|
||||||
key = [ key ]
|
key = [ key ]
|
||||||
result = {}
|
result = {}
|
||||||
|
@ -176,84 +217,79 @@ class Stream(NilmApp):
|
||||||
result[k] = None
|
result[k] = None
|
||||||
return result
|
return result
|
||||||
|
|
||||||
# Helper for set_metadata and get_metadata
|
|
||||||
def _metadata_helper(self, function, path, data):
|
|
||||||
if not isinstance(data, dict):
|
|
||||||
try:
|
|
||||||
data = dict(json.loads(data))
|
|
||||||
except TypeError as e:
|
|
||||||
raise NilmDBError("can't parse 'data' parameter: " + str(e))
|
|
||||||
for key in data:
|
|
||||||
if not isinstance(data[key], (str, float, int)):
|
|
||||||
raise NilmDBError("metadata values must be a string or number")
|
|
||||||
function(path, data)
|
|
||||||
|
|
||||||
# /stream/set_metadata?path=/newton/prep&data=<json>
|
# /stream/set_metadata?path=/newton/prep&data=<json>
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_in()
|
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
@exception_to_httperror(NilmDBError, LookupError)
|
@exception_to_httperror(NilmDBError, LookupError, TypeError)
|
||||||
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
||||||
def set_metadata(self, path, data):
|
def set_metadata(self, path, data):
|
||||||
"""Set metadata for the named stream, replacing any existing
|
"""Set metadata for the named stream, replacing any
|
||||||
metadata. Data can be json-encoded or a plain dictionary."""
|
existing metadata. Data should be a json-encoded
|
||||||
self._metadata_helper(self.db.stream_set_metadata, path, data)
|
dictionary"""
|
||||||
|
data_dict = json.loads(data)
|
||||||
|
self.db.stream_set_metadata(path, data_dict)
|
||||||
|
|
||||||
# /stream/update_metadata?path=/newton/prep&data=<json>
|
# /stream/update_metadata?path=/newton/prep&data=<json>
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_in()
|
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
@exception_to_httperror(NilmDBError, LookupError, ValueError)
|
@exception_to_httperror(NilmDBError, LookupError, TypeError)
|
||||||
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
||||||
def update_metadata(self, path, data):
|
def update_metadata(self, path, data):
|
||||||
"""Set metadata for the named stream, replacing any existing
|
"""Update metadata for the named stream. Data
|
||||||
metadata. Data can be json-encoded or a plain dictionary."""
|
should be a json-encoded dictionary"""
|
||||||
self._metadata_helper(self.db.stream_update_metadata, path, data)
|
data_dict = json.loads(data)
|
||||||
|
self.db.stream_update_metadata(path, data_dict)
|
||||||
|
|
||||||
# /stream/insert?path=/newton/prep
|
# /stream/insert?path=/newton/prep
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
@exception_to_httperror(NilmDBError, ValueError)
|
|
||||||
@cherrypy.tools.CORS_allow(methods = ["PUT"])
|
@cherrypy.tools.CORS_allow(methods = ["PUT"])
|
||||||
def insert(self, path, start, end, binary=False):
|
def insert(self, path, start, end):
|
||||||
"""
|
"""
|
||||||
Insert new data into the database. Provide textual data
|
Insert new data into the database. Provide textual data
|
||||||
(matching the path's layout) as a HTTP PUT.
|
(matching the path's layout) as a HTTP PUT.
|
||||||
|
|
||||||
If 'binary' is True, expect raw binary data, rather than lines
|
|
||||||
of ASCII-formatted data. Raw binary data is always
|
|
||||||
little-endian and matches the database types (including an
|
|
||||||
int64 timestamp).
|
|
||||||
"""
|
"""
|
||||||
binary = bool_param(binary)
|
|
||||||
|
|
||||||
# Important that we always read the input before throwing any
|
# Important that we always read the input before throwing any
|
||||||
# errors, to keep lengths happy for persistent connections.
|
# errors, to keep lengths happy for persistent connections.
|
||||||
# Note that CherryPy 3.2.2 has a bug where this fails for GET
|
# Note that CherryPy 3.2.2 has a bug where this fails for GET
|
||||||
# requests, if we ever want to handle those (issue #1134)
|
# requests, if we ever want to handle those (issue #1134)
|
||||||
body = cherrypy.request.body.read()
|
body = cherrypy.request.body.read()
|
||||||
|
|
||||||
# Verify content type for binary data
|
|
||||||
content_type = cherrypy.request.headers.get('content-type')
|
|
||||||
if binary and content_type:
|
|
||||||
if content_type != "application/octet-stream":
|
|
||||||
raise cherrypy.HTTPError("400", "Content type must be "
|
|
||||||
"application/octet-stream for "
|
|
||||||
"binary data, not " + content_type)
|
|
||||||
|
|
||||||
# Note that non-binary data is *not* decoded from bytes to string,
|
|
||||||
# but rather passed directly to stream_insert.
|
|
||||||
|
|
||||||
# Check path and get layout
|
# Check path and get layout
|
||||||
if len(self.db.stream_list(path=path)) != 1:
|
streams = self.db.stream_list(path = path)
|
||||||
raise cherrypy.HTTPError("404", "No such stream: " + path)
|
if len(streams) != 1:
|
||||||
|
raise cherrypy.HTTPError("404 Not Found", "No such stream")
|
||||||
|
layout = streams[0][1]
|
||||||
|
|
||||||
|
# Parse the input data
|
||||||
|
try:
|
||||||
|
parser = nilmdb.server.layout.Parser(layout)
|
||||||
|
parser.parse(body)
|
||||||
|
except nilmdb.server.layout.ParserError as e:
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request",
|
||||||
|
"error parsing input data: " +
|
||||||
|
e.message)
|
||||||
|
|
||||||
# Check limits
|
# Check limits
|
||||||
(start, end) = self._get_times(start, end)
|
start = float(start)
|
||||||
|
end = float(end)
|
||||||
|
if start >= end:
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request",
|
||||||
|
"start must precede end")
|
||||||
|
if parser.min_timestamp is not None and parser.min_timestamp < start:
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request", "Data timestamp " +
|
||||||
|
repr(parser.min_timestamp) +
|
||||||
|
" < start time " + repr(start))
|
||||||
|
if parser.max_timestamp is not None and parser.max_timestamp >= end:
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request", "Data timestamp " +
|
||||||
|
repr(parser.max_timestamp) +
|
||||||
|
" >= end time " + repr(end))
|
||||||
|
|
||||||
# Pass the data directly to nilmdb, which will parse it and
|
# Now do the nilmdb insert, passing it the parser full of data.
|
||||||
# raise a ValueError if there are any problems.
|
try:
|
||||||
self.db.stream_insert(path, start, end, body, binary)
|
self.db.stream_insert(path, start, end, parser.data)
|
||||||
|
except NilmDBError as e:
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request", e.message)
|
||||||
|
|
||||||
# Done
|
# Done
|
||||||
return
|
return
|
||||||
|
@ -261,73 +297,62 @@ class Stream(NilmApp):
|
||||||
# /stream/remove?path=/newton/prep
|
# /stream/remove?path=/newton/prep
|
||||||
# /stream/remove?path=/newton/prep&start=1234567890.0&end=1234567899.0
|
# /stream/remove?path=/newton/prep&start=1234567890.0&end=1234567899.0
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_in()
|
@cherrypy.tools.json_out()
|
||||||
|
@exception_to_httperror(NilmDBError)
|
||||||
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
@cherrypy.tools.CORS_allow(methods = ["POST"])
|
||||||
@chunked_response
|
|
||||||
@response_type("application/x-json-stream")
|
|
||||||
def remove(self, path, start = None, end = None):
|
def remove(self, path, start = None, end = None):
|
||||||
"""
|
"""
|
||||||
Remove data from the backend database. Removes all data in
|
Remove data from the backend database. Removes all data in
|
||||||
the interval [start, end).
|
the interval [start, end). Returns the number of data points
|
||||||
|
removed.
|
||||||
Returns the number of data points removed. Since this is a potentially
|
|
||||||
long-running operation, multiple numbers may be returned as the
|
|
||||||
data gets removed from the backend database. The total number of
|
|
||||||
points removed is the sum of all of these numbers.
|
|
||||||
"""
|
"""
|
||||||
(start, end) = self._get_times(start, end)
|
if start is not None:
|
||||||
|
start = float(start)
|
||||||
if len(self.db.stream_list(path=path)) != 1:
|
if end is not None:
|
||||||
raise cherrypy.HTTPError("404", "No such stream: " + path)
|
end = float(end)
|
||||||
|
if start is not None and end is not None:
|
||||||
def content(start, end):
|
if start >= end:
|
||||||
# Note: disable chunked responses to see tracebacks from here.
|
raise cherrypy.HTTPError("400 Bad Request",
|
||||||
while True:
|
"start must precede end")
|
||||||
(removed, restart) = self.db.stream_remove(path, start, end)
|
return self.db.stream_remove(path, start, end)
|
||||||
response = json.dumps(removed) + "\r\n"
|
|
||||||
yield response.encode('utf-8')
|
|
||||||
if restart is None:
|
|
||||||
break
|
|
||||||
start = restart
|
|
||||||
return content(start, end)
|
|
||||||
|
|
||||||
# /stream/intervals?path=/newton/prep
|
# /stream/intervals?path=/newton/prep
|
||||||
# /stream/intervals?path=/newton/prep&start=1234567890.0&end=1234567899.0
|
# /stream/intervals?path=/newton/prep&start=1234567890.0&end=1234567899.0
|
||||||
# /stream/intervals?path=/newton/prep&diffpath=/newton/prep2
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@chunked_response
|
@chunked_response
|
||||||
@response_type("application/x-json-stream")
|
@response_type("application/x-json-stream")
|
||||||
def intervals(self, path, start=None, end=None, diffpath=None):
|
def intervals(self, path, start = None, end = None):
|
||||||
"""
|
"""
|
||||||
Get intervals from backend database. Streams the resulting
|
Get intervals from backend database. Streams the resulting
|
||||||
intervals as JSON strings separated by CR LF pairs. This may
|
intervals as JSON strings separated by CR LF pairs. This may
|
||||||
make multiple requests to the nilmdb backend to avoid causing
|
make multiple requests to the nilmdb backend to avoid causing
|
||||||
it to block for too long.
|
it to block for too long.
|
||||||
|
|
||||||
Returns intervals between 'start' and 'end' belonging to
|
|
||||||
'path'. If 'diff' is provided, the set-difference between
|
|
||||||
intervals in 'path' and intervals in 'diffpath' are
|
|
||||||
returned instead.
|
|
||||||
|
|
||||||
Note that the response type is the non-standard
|
Note that the response type is the non-standard
|
||||||
'application/x-json-stream' for lack of a better option.
|
'application/x-json-stream' for lack of a better option.
|
||||||
"""
|
"""
|
||||||
(start, end) = self._get_times(start, end)
|
if start is not None:
|
||||||
|
start = float(start)
|
||||||
|
if end is not None:
|
||||||
|
end = float(end)
|
||||||
|
|
||||||
if len(self.db.stream_list(path=path)) != 1:
|
if start is not None and end is not None:
|
||||||
raise cherrypy.HTTPError("404", "No such stream: " + path)
|
if start >= end:
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request",
|
||||||
|
"start must precede end")
|
||||||
|
|
||||||
if diffpath and len(self.db.stream_list(path=diffpath)) != 1:
|
streams = self.db.stream_list(path = path)
|
||||||
raise cherrypy.HTTPError("404", "No such stream: " + diffpath)
|
if len(streams) != 1:
|
||||||
|
raise cherrypy.HTTPError("404 Not Found", "No such stream")
|
||||||
|
|
||||||
|
@workaround_cp_bug_1200
|
||||||
def content(start, end):
|
def content(start, end):
|
||||||
# Note: disable chunked responses to see tracebacks from here.
|
# Note: disable chunked responses to see tracebacks from here.
|
||||||
while True:
|
while True:
|
||||||
(ints, restart) = self.db.stream_intervals(path, start, end,
|
(ints, restart) = self.db.stream_intervals(path, start, end)
|
||||||
diffpath)
|
|
||||||
response = ''.join([ json.dumps(i) + "\r\n" for i in ints ])
|
response = ''.join([ json.dumps(i) + "\r\n" for i in ints ])
|
||||||
yield response.encode('utf-8')
|
yield response
|
||||||
if restart is None:
|
if restart == 0:
|
||||||
break
|
break
|
||||||
start = restart
|
start = restart
|
||||||
return content(start, end)
|
return content(start, end)
|
||||||
|
@ -335,87 +360,77 @@ class Stream(NilmApp):
|
||||||
# /stream/extract?path=/newton/prep&start=1234567890.0&end=1234567899.0
|
# /stream/extract?path=/newton/prep&start=1234567890.0&end=1234567899.0
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@chunked_response
|
@chunked_response
|
||||||
def extract(self, path, start=None, end=None,
|
@response_type("text/plain")
|
||||||
count=False, markup=False, binary=False):
|
def extract(self, path, start = None, end = None, count = False):
|
||||||
"""
|
"""
|
||||||
Extract data from backend database. Streams the resulting
|
Extract data from backend database. Streams the resulting
|
||||||
entries as ASCII text lines separated by newlines. This may
|
entries as ASCII text lines separated by newlines. This may
|
||||||
make multiple requests to the nilmdb backend to avoid causing
|
make multiple requests to the nilmdb backend to avoid causing
|
||||||
it to block for too long.
|
it to block for too long.
|
||||||
|
|
||||||
If 'count' is True, returns a count rather than actual data.
|
Add count=True to return a count rather than actual data.
|
||||||
|
|
||||||
If 'markup' is True, adds comments to the stream denoting each
|
|
||||||
interval's start and end timestamp.
|
|
||||||
|
|
||||||
If 'binary' is True, return raw binary data, rather than lines
|
|
||||||
of ASCII-formatted data. Raw binary data is always
|
|
||||||
little-endian and matches the database types (including an
|
|
||||||
int64 timestamp).
|
|
||||||
"""
|
"""
|
||||||
binary = bool_param(binary)
|
if start is not None:
|
||||||
markup = bool_param(markup)
|
start = float(start)
|
||||||
count = bool_param(count)
|
if end is not None:
|
||||||
|
end = float(end)
|
||||||
|
|
||||||
(start, end) = self._get_times(start, end)
|
# Check parameters
|
||||||
|
if start is not None and end is not None:
|
||||||
|
if start >= end:
|
||||||
|
raise cherrypy.HTTPError("400 Bad Request",
|
||||||
|
"start must precede end")
|
||||||
|
|
||||||
# Check path and get layout
|
# Check path and get layout
|
||||||
if len(self.db.stream_list(path=path)) != 1:
|
streams = self.db.stream_list(path = path)
|
||||||
raise cherrypy.HTTPError("404", "No such stream: " + path)
|
if len(streams) != 1:
|
||||||
|
raise cherrypy.HTTPError("404 Not Found", "No such stream")
|
||||||
|
layout = streams[0][1]
|
||||||
|
|
||||||
if binary:
|
# Get formatter
|
||||||
content_type = "application/octet-stream"
|
formatter = nilmdb.server.layout.Formatter(layout)
|
||||||
if markup or count:
|
|
||||||
raise cherrypy.HTTPError("400", "can't mix binary and "
|
|
||||||
"markup or count modes")
|
|
||||||
else:
|
|
||||||
content_type = "text/plain"
|
|
||||||
cherrypy.response.headers['Content-Type'] = content_type
|
|
||||||
|
|
||||||
def content(start, end):
|
@workaround_cp_bug_1200
|
||||||
|
def content(start, end, count):
|
||||||
# Note: disable chunked responses to see tracebacks from here.
|
# Note: disable chunked responses to see tracebacks from here.
|
||||||
if count:
|
if count:
|
||||||
matched = self.db.stream_extract(path, start, end,
|
matched = self.db.stream_extract(path, start, end, count)
|
||||||
count=True)
|
yield sprintf("%d\n", matched)
|
||||||
yield sprintf(b"%d\n", matched)
|
|
||||||
return
|
return
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
(data, restart) = self.db.stream_extract(
|
(data, restart) = self.db.stream_extract(path, start, end)
|
||||||
path, start, end, count=False,
|
|
||||||
markup=markup, binary=binary)
|
|
||||||
yield data
|
|
||||||
|
|
||||||
if restart is None:
|
# Format the data and yield it
|
||||||
|
yield formatter.format(data)
|
||||||
|
|
||||||
|
if restart == 0:
|
||||||
return
|
return
|
||||||
start = restart
|
start = restart
|
||||||
return content(start, end)
|
return content(start, end, count)
|
||||||
|
|
||||||
|
class Exiter(object):
|
||||||
class Exiter():
|
|
||||||
"""App that exits the server, for testing"""
|
"""App that exits the server, for testing"""
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
def index(self):
|
def index(self):
|
||||||
cherrypy.response.headers['Content-Type'] = 'text/plain'
|
cherrypy.response.headers['Content-Type'] = 'text/plain'
|
||||||
|
|
||||||
def content():
|
def content():
|
||||||
yield b'Exiting by request'
|
yield 'Exiting by request'
|
||||||
raise SystemExit
|
raise SystemExit
|
||||||
|
|
||||||
return content()
|
return content()
|
||||||
index._cp_config = { 'response.stream': True }
|
index._cp_config = { 'response.stream': True }
|
||||||
|
|
||||||
|
class Server(object):
|
||||||
class Server():
|
|
||||||
def __init__(self, db, host = '127.0.0.1', port = 8080,
|
def __init__(self, db, host = '127.0.0.1', port = 8080,
|
||||||
stoppable = False, # whether /exit URL exists
|
stoppable = False, # whether /exit URL exists
|
||||||
|
embedded = True, # hide diagnostics and output, etc
|
||||||
fast_shutdown = False, # don't wait for clients to disconn.
|
fast_shutdown = False, # don't wait for clients to disconn.
|
||||||
force_traceback=False, # include traceback in all errors
|
force_traceback = False # include traceback in all errors
|
||||||
basepath='', # base URL path for cherrypy.tree
|
|
||||||
):
|
):
|
||||||
# Save server version, just for verification during tests
|
# Save server version, just for verification during tests
|
||||||
self.version = nilmdb.__version__
|
self.version = nilmdb.__version__
|
||||||
|
|
||||||
|
self.embedded = embedded
|
||||||
self.db = db
|
self.db = db
|
||||||
if not getattr(db, "_thread_safe", None):
|
if not getattr(db, "_thread_safe", None):
|
||||||
raise KeyError("Database object " + str(db) + " doesn't claim "
|
raise KeyError("Database object " + str(db) + " doesn't claim "
|
||||||
|
@ -425,12 +440,13 @@ class Server():
|
||||||
|
|
||||||
# Build up global server configuration
|
# Build up global server configuration
|
||||||
cherrypy.config.update({
|
cherrypy.config.update({
|
||||||
'environment': 'embedded',
|
|
||||||
'server.socket_host': host,
|
'server.socket_host': host,
|
||||||
'server.socket_port': port,
|
'server.socket_port': port,
|
||||||
'engine.autoreload.on': False,
|
'engine.autoreload_on': False,
|
||||||
'server.max_request_body_size': 8*1024*1024,
|
'server.max_request_body_size': 8*1024*1024,
|
||||||
})
|
})
|
||||||
|
if self.embedded:
|
||||||
|
cherrypy.config.update({ 'environment': 'embedded' })
|
||||||
|
|
||||||
# Build up application specific configuration
|
# Build up application specific configuration
|
||||||
app_config = {}
|
app_config = {}
|
||||||
|
@ -447,12 +463,6 @@ class Server():
|
||||||
app_config.update({ 'tools.CORS_allow.on': True,
|
app_config.update({ 'tools.CORS_allow.on': True,
|
||||||
'tools.CORS_allow.methods': ['GET', 'HEAD'] })
|
'tools.CORS_allow.methods': ['GET', 'HEAD'] })
|
||||||
|
|
||||||
# Configure the 'json_in' tool to also allow other content-types
|
|
||||||
# (like x-www-form-urlencoded), and to treat JSON as a dict that
|
|
||||||
# fills requests.param.
|
|
||||||
app_config.update({'tools.json_in.force': False,
|
|
||||||
'tools.json_in.processor': json_to_request_params})
|
|
||||||
|
|
||||||
# Send tracebacks in error responses. They're hidden by the
|
# Send tracebacks in error responses. They're hidden by the
|
||||||
# error_page function for client errors (code 400-499).
|
# error_page function for client errors (code 400-499).
|
||||||
app_config.update({ 'request.show_tracebacks' : True })
|
app_config.update({ 'request.show_tracebacks' : True })
|
||||||
|
@ -469,78 +479,79 @@ class Server():
|
||||||
if stoppable:
|
if stoppable:
|
||||||
root.exit = Exiter()
|
root.exit = Exiter()
|
||||||
cherrypy.tree.apps = {}
|
cherrypy.tree.apps = {}
|
||||||
cherrypy.tree.mount(root, basepath, config={"/": app_config})
|
cherrypy.tree.mount(root, "/", config = { "/" : app_config })
|
||||||
|
|
||||||
# Shutdowns normally wait for clients to disconnect. To speed
|
# Shutdowns normally wait for clients to disconnect. To speed
|
||||||
# up tests, set fast_shutdown = True
|
# up tests, set fast_shutdown = True
|
||||||
if fast_shutdown:
|
if fast_shutdown:
|
||||||
cherrypy.server.shutdown_timeout = 0
|
# Setting timeout to 0 triggers os._exit(70) at shutdown, grr...
|
||||||
|
cherrypy.server.shutdown_timeout = 0.01
|
||||||
else:
|
else:
|
||||||
cherrypy.server.shutdown_timeout = 5
|
cherrypy.server.shutdown_timeout = 5
|
||||||
|
|
||||||
# Set up the WSGI application pointer for external programs
|
|
||||||
self.wsgi_application = cherrypy.tree
|
|
||||||
|
|
||||||
def json_error_page(self, status, message, traceback, version):
|
def json_error_page(self, status, message, traceback, version):
|
||||||
"""Return a custom error page in JSON so the client can parse it"""
|
"""Return a custom error page in JSON so the client can parse it"""
|
||||||
return json_error_page(status, message, traceback, version,
|
errordata = { "status" : status,
|
||||||
self.force_traceback)
|
"message" : message,
|
||||||
|
"traceback" : traceback }
|
||||||
|
# Don't send a traceback if the error was 400-499 (client's fault)
|
||||||
|
try:
|
||||||
|
code = int(status.split()[0])
|
||||||
|
if not self.force_traceback:
|
||||||
|
if code >= 400 and code <= 499:
|
||||||
|
errordata["traceback"] = ""
|
||||||
|
except Exception: # pragma: no cover
|
||||||
|
pass
|
||||||
|
# Override the response type, which was previously set to text/html
|
||||||
|
cherrypy.serving.response.headers['Content-Type'] = (
|
||||||
|
"application/json;charset=utf-8" )
|
||||||
|
# Undo the HTML escaping that cherrypy's get_error_page function applies
|
||||||
|
# (cherrypy issue 1135)
|
||||||
|
for k, v in errordata.iteritems():
|
||||||
|
v = v.replace("<","<")
|
||||||
|
v = v.replace(">",">")
|
||||||
|
v = v.replace("&","&")
|
||||||
|
errordata[k] = v
|
||||||
|
return json.dumps(errordata, separators=(',',':'))
|
||||||
|
|
||||||
def start(self, blocking = False, event = None):
|
def start(self, blocking = False, event = None):
|
||||||
cherrypy_start(blocking, event)
|
|
||||||
|
if not self.embedded: # pragma: no cover
|
||||||
|
# Handle signals nicely
|
||||||
|
if hasattr(cherrypy.engine, "signal_handler"):
|
||||||
|
cherrypy.engine.signal_handler.subscribe()
|
||||||
|
if hasattr(cherrypy.engine, "console_control_handler"):
|
||||||
|
cherrypy.engine.console_control_handler.subscribe()
|
||||||
|
|
||||||
|
# Cherrypy stupidly calls os._exit(70) when it can't bind the
|
||||||
|
# port. At least try to print a reasonable error and continue
|
||||||
|
# in this case, rather than just dying silently (as we would
|
||||||
|
# otherwise do in embedded mode)
|
||||||
|
real_exit = os._exit
|
||||||
|
def fake_exit(code): # pragma: no cover
|
||||||
|
if code == os.EX_SOFTWARE:
|
||||||
|
fprintf(sys.stderr, "error: CherryPy called os._exit!\n")
|
||||||
|
else:
|
||||||
|
real_exit(code)
|
||||||
|
os._exit = fake_exit
|
||||||
|
cherrypy.engine.start()
|
||||||
|
os._exit = real_exit
|
||||||
|
|
||||||
|
# Signal that the engine has started successfully
|
||||||
|
if event is not None:
|
||||||
|
event.set()
|
||||||
|
|
||||||
|
if blocking:
|
||||||
|
try:
|
||||||
|
cherrypy.engine.wait(cherrypy.engine.states.EXITING,
|
||||||
|
interval = 0.1, channel = 'main')
|
||||||
|
except (KeyboardInterrupt, IOError): # pragma: no cover
|
||||||
|
cherrypy.engine.log('Keyboard Interrupt: shutting down bus')
|
||||||
|
cherrypy.engine.exit()
|
||||||
|
except SystemExit: # pragma: no cover
|
||||||
|
cherrypy.engine.log('SystemExit raised: shutting down bus')
|
||||||
|
cherrypy.engine.exit()
|
||||||
|
raise
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
cherrypy_stop()
|
cherrypy.engine.exit()
|
||||||
|
|
||||||
|
|
||||||
# Use a single global nilmdb.server.NilmDB and nilmdb.server.Server
|
|
||||||
# instance since the database can only be opened once. For this to
|
|
||||||
# work, the web server must use only a single process and single
|
|
||||||
# Python interpreter. Multiple threads are OK.
|
|
||||||
_wsgi_server = None
|
|
||||||
|
|
||||||
|
|
||||||
def wsgi_application(dbpath, basepath):
|
|
||||||
"""Return a WSGI application object with a database at the
|
|
||||||
specified path.
|
|
||||||
|
|
||||||
'dbpath' is a filesystem location, e.g. /home/nilm/db
|
|
||||||
|
|
||||||
'basepath' is the URL path of the application base, which
|
|
||||||
is the same as the first argument to Apache's WSGIScriptAlias
|
|
||||||
directive.
|
|
||||||
"""
|
|
||||||
def application(environ, start_response):
|
|
||||||
global _wsgi_server
|
|
||||||
if _wsgi_server is None:
|
|
||||||
# Try to start the server
|
|
||||||
try:
|
|
||||||
db = nilmdb.utils.serializer_proxy(
|
|
||||||
nilmdb.server.NilmDB)(dbpath)
|
|
||||||
_wsgi_server = nilmdb.server.Server(
|
|
||||||
db, basepath=basepath.rstrip('/'))
|
|
||||||
except Exception:
|
|
||||||
# Build an error message on failure
|
|
||||||
import pprint
|
|
||||||
err = sprintf("Initializing database at path '%s' failed:\n\n",
|
|
||||||
dbpath)
|
|
||||||
err += traceback.format_exc()
|
|
||||||
import pwd
|
|
||||||
import grp
|
|
||||||
err += sprintf("\nRunning as: uid=%d (%s), gid=%d (%s) "
|
|
||||||
"on host %s, pid %d\n",
|
|
||||||
os.getuid(), pwd.getpwuid(os.getuid())[0],
|
|
||||||
os.getgid(), grp.getgrgid(os.getgid())[0],
|
|
||||||
socket.gethostname(), os.getpid())
|
|
||||||
err += sprintf("\nEnvironment:\n%s\n", pprint.pformat(environ))
|
|
||||||
if _wsgi_server is None:
|
|
||||||
# Serve up the error with our own mini WSGI app.
|
|
||||||
err_b = err.encode('utf-8')
|
|
||||||
headers = [('Content-type', 'text/plain; charset=utf-8'),
|
|
||||||
('Content-length', str(len(err_b)))]
|
|
||||||
start_response("500 Internal Server Error", headers)
|
|
||||||
return [err_b]
|
|
||||||
|
|
||||||
# Call the normal application
|
|
||||||
return _wsgi_server.wsgi_application(environ, start_response)
|
|
||||||
return application
|
|
||||||
|
|
|
@ -1,225 +0,0 @@
|
||||||
"""Miscellaneous decorators and other helpers for running a CherryPy
|
|
||||||
server"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import decorator
|
|
||||||
import functools
|
|
||||||
import threading
|
|
||||||
|
|
||||||
import cherrypy
|
|
||||||
|
|
||||||
|
|
||||||
# Helper to parse parameters into booleans
|
|
||||||
def bool_param(s):
|
|
||||||
"""Return a bool indicating whether parameter 's' was True or False,
|
|
||||||
supporting a few different types for 's'."""
|
|
||||||
try:
|
|
||||||
ss = s.lower()
|
|
||||||
if ss in ["0", "false", "f", "no", "n"]:
|
|
||||||
return False
|
|
||||||
if ss in ["1", "true", "t", "yes", "y"]:
|
|
||||||
return True
|
|
||||||
except Exception:
|
|
||||||
return bool(s)
|
|
||||||
raise cherrypy.HTTPError("400 Bad Request",
|
|
||||||
"can't parse parameter: " + ss)
|
|
||||||
|
|
||||||
|
|
||||||
# Decorators
|
|
||||||
def chunked_response(func):
|
|
||||||
"""Decorator to enable chunked responses."""
|
|
||||||
# Set this to False to get better tracebacks from some requests
|
|
||||||
# (/stream/extract, /stream/intervals).
|
|
||||||
func._cp_config = {'response.stream': True}
|
|
||||||
return func
|
|
||||||
|
|
||||||
|
|
||||||
def response_type(content_type):
|
|
||||||
"""Return a decorator-generating function that sets the
|
|
||||||
response type to the specified string."""
|
|
||||||
def wrapper(func, *args, **kwargs):
|
|
||||||
cherrypy.response.headers['Content-Type'] = content_type
|
|
||||||
return func(*args, **kwargs)
|
|
||||||
return decorator.decorator(wrapper)
|
|
||||||
|
|
||||||
|
|
||||||
def exception_to_httperror(*expected):
|
|
||||||
"""Return a decorator-generating function that catches expected
|
|
||||||
errors and throws a HTTPError describing it instead.
|
|
||||||
|
|
||||||
@exception_to_httperror(NilmDBError, ValueError)
|
|
||||||
def foo():
|
|
||||||
pass
|
|
||||||
"""
|
|
||||||
def wrapper(func, *args, **kwargs):
|
|
||||||
exc_info = None
|
|
||||||
try:
|
|
||||||
return func(*args, **kwargs)
|
|
||||||
except expected:
|
|
||||||
# Re-raise it, but maintain the original traceback
|
|
||||||
exc_info = sys.exc_info()
|
|
||||||
new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
|
|
||||||
raise new_exc.with_traceback(exc_info[2])
|
|
||||||
finally:
|
|
||||||
del exc_info
|
|
||||||
# We need to preserve the function's argspecs for CherryPy to
|
|
||||||
# handle argument errors correctly. Decorator.decorator takes
|
|
||||||
# care of that.
|
|
||||||
return decorator.decorator(wrapper)
|
|
||||||
|
|
||||||
|
|
||||||
# Custom CherryPy tools
|
|
||||||
def CORS_allow(methods):
|
|
||||||
"""This does several things:
|
|
||||||
|
|
||||||
Handles CORS preflight requests.
|
|
||||||
Adds Allow: header to all requests.
|
|
||||||
Raise 405 if request.method not in method.
|
|
||||||
|
|
||||||
It is similar to cherrypy.tools.allow, with the CORS stuff added.
|
|
||||||
|
|
||||||
Add this to CherryPy with:
|
|
||||||
cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
|
|
||||||
"""
|
|
||||||
request = cherrypy.request.headers
|
|
||||||
response = cherrypy.response.headers
|
|
||||||
|
|
||||||
if not isinstance(methods, (tuple, list)):
|
|
||||||
methods = [methods]
|
|
||||||
methods = [m.upper() for m in methods if m]
|
|
||||||
if not methods:
|
|
||||||
methods = ['GET', 'HEAD']
|
|
||||||
elif 'GET' in methods and 'HEAD' not in methods:
|
|
||||||
methods.append('HEAD')
|
|
||||||
response['Allow'] = ', '.join(methods)
|
|
||||||
|
|
||||||
# Allow all origins
|
|
||||||
if 'Origin' in request:
|
|
||||||
response['Access-Control-Allow-Origin'] = request['Origin']
|
|
||||||
|
|
||||||
# If it's a CORS request, send response.
|
|
||||||
request_method = request.get("Access-Control-Request-Method", None)
|
|
||||||
request_headers = request.get("Access-Control-Request-Headers", None)
|
|
||||||
if (cherrypy.request.method == "OPTIONS" and
|
|
||||||
request_method and request_headers):
|
|
||||||
response['Access-Control-Allow-Headers'] = request_headers
|
|
||||||
response['Access-Control-Allow-Methods'] = ', '.join(methods)
|
|
||||||
# Try to stop further processing and return a 200 OK
|
|
||||||
cherrypy.response.status = "200 OK"
|
|
||||||
cherrypy.response.body = b""
|
|
||||||
cherrypy.request.handler = lambda: ""
|
|
||||||
return
|
|
||||||
|
|
||||||
# Reject methods that were not explicitly allowed
|
|
||||||
if cherrypy.request.method not in methods:
|
|
||||||
raise cherrypy.HTTPError(405)
|
|
||||||
|
|
||||||
|
|
||||||
# Helper for json_in tool to process JSON data into normal request
|
|
||||||
# parameters.
|
|
||||||
def json_to_request_params(body):
|
|
||||||
cherrypy.lib.jsontools.json_processor(body)
|
|
||||||
if not isinstance(cherrypy.request.json, dict):
|
|
||||||
raise cherrypy.HTTPError(415)
|
|
||||||
cherrypy.request.params.update(cherrypy.request.json)
|
|
||||||
|
|
||||||
|
|
||||||
# Used as an "error_page.default" handler
|
|
||||||
def json_error_page(status, message, traceback, version,
|
|
||||||
force_traceback=False):
|
|
||||||
"""Return a custom error page in JSON so the client can parse it"""
|
|
||||||
errordata = {"status": status,
|
|
||||||
"message": message,
|
|
||||||
"version": version,
|
|
||||||
"traceback": traceback}
|
|
||||||
# Don't send a traceback if the error was 400-499 (client's fault)
|
|
||||||
code = int(status.split()[0])
|
|
||||||
if not force_traceback:
|
|
||||||
if 400 <= code <= 499:
|
|
||||||
errordata["traceback"] = ""
|
|
||||||
# Override the response type, which was previously set to text/html
|
|
||||||
cherrypy.serving.response.headers['Content-Type'] = (
|
|
||||||
"application/json;charset=utf-8")
|
|
||||||
# Undo the HTML escaping that cherrypy's get_error_page function applies
|
|
||||||
# (cherrypy issue 1135)
|
|
||||||
for k, v in errordata.items():
|
|
||||||
v = v.replace("<", "<")
|
|
||||||
v = v.replace(">", ">")
|
|
||||||
v = v.replace("&", "&")
|
|
||||||
errordata[k] = v
|
|
||||||
return json.dumps(errordata, separators=(',', ':'))
|
|
||||||
|
|
||||||
|
|
||||||
class CherryPyExit(SystemExit):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def cherrypy_patch_exit():
|
|
||||||
# Cherrypy stupidly calls os._exit(70) when it can't bind the port
|
|
||||||
# and exits. Instead of that, raise a CherryPyExit (derived from
|
|
||||||
# SystemExit). This exception may not make it back up to the caller
|
|
||||||
# due to internal thread use in the CherryPy engine, but there should
|
|
||||||
# be at least some indication that it happened.
|
|
||||||
bus = cherrypy.process.wspbus.bus
|
|
||||||
if "_patched_exit" in bus.__dict__:
|
|
||||||
return
|
|
||||||
bus._patched_exit = True
|
|
||||||
|
|
||||||
def patched_exit(orig):
|
|
||||||
real_exit = os._exit
|
|
||||||
|
|
||||||
def fake_exit(code):
|
|
||||||
raise CherryPyExit(code)
|
|
||||||
os._exit = fake_exit
|
|
||||||
try:
|
|
||||||
orig()
|
|
||||||
finally:
|
|
||||||
os._exit = real_exit
|
|
||||||
bus.exit = functools.partial(patched_exit, bus.exit)
|
|
||||||
|
|
||||||
# A behavior change in Python 3.8 means that some thread exceptions,
|
|
||||||
# derived from SystemExit, now print tracebacks where they didn't
|
|
||||||
# used to: https://bugs.python.org/issue1230540
|
|
||||||
# Install a thread exception hook that ignores CherryPyExit;
|
|
||||||
# to make this match the behavior where we didn't set
|
|
||||||
# threading.excepthook, we also need to ignore SystemExit.
|
|
||||||
def hook(args):
|
|
||||||
if args.exc_type == CherryPyExit or args.exc_type == SystemExit:
|
|
||||||
return
|
|
||||||
sys.excepthook(args.exc_type, args.exc_value,
|
|
||||||
args.exc_traceback) # pragma: no cover
|
|
||||||
threading.excepthook = hook
|
|
||||||
|
|
||||||
|
|
||||||
# Start/stop CherryPy standalone server
|
|
||||||
def cherrypy_start(blocking=False, event=False):
|
|
||||||
"""Start the CherryPy server, handling errors and signals
|
|
||||||
somewhat gracefully."""
|
|
||||||
|
|
||||||
cherrypy_patch_exit()
|
|
||||||
|
|
||||||
# Start the server
|
|
||||||
cherrypy.engine.start()
|
|
||||||
|
|
||||||
# Signal that the engine has started successfully
|
|
||||||
if event is not None:
|
|
||||||
event.set()
|
|
||||||
|
|
||||||
if blocking:
|
|
||||||
try:
|
|
||||||
cherrypy.engine.wait(cherrypy.engine.states.EXITING,
|
|
||||||
interval=0.1, channel='main')
|
|
||||||
except (KeyboardInterrupt, IOError):
|
|
||||||
cherrypy.engine.log('Keyboard Interrupt: shutting down')
|
|
||||||
cherrypy.engine.exit()
|
|
||||||
except SystemExit:
|
|
||||||
cherrypy.engine.log('SystemExit raised: shutting down')
|
|
||||||
cherrypy.engine.exit()
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
# Stop CherryPy server
|
|
||||||
def cherrypy_stop():
|
|
||||||
cherrypy.engine.exit()
|
|
|
@ -1,16 +1,10 @@
|
||||||
"""NilmDB utilities"""
|
"""NilmDB utilities"""
|
||||||
|
|
||||||
|
|
||||||
from nilmdb.utils.timer import Timer
|
from nilmdb.utils.timer import Timer
|
||||||
|
from nilmdb.utils.iteratorizer import Iteratorizer
|
||||||
from nilmdb.utils.serializer import serializer_proxy
|
from nilmdb.utils.serializer import serializer_proxy
|
||||||
from nilmdb.utils.lrucache import lru_cache
|
from nilmdb.utils.lrucache import lru_cache
|
||||||
from nilmdb.utils.diskusage import du, human_size
|
from nilmdb.utils.diskusage import du, human_size
|
||||||
from nilmdb.utils.mustclose import must_close
|
from nilmdb.utils.mustclose import must_close
|
||||||
from nilmdb.utils import atomic
|
from nilmdb.utils import atomic
|
||||||
import nilmdb.utils.threadsafety
|
import nilmdb.utils.threadsafety
|
||||||
import nilmdb.utils.fallocate
|
|
||||||
import nilmdb.utils.time
|
|
||||||
import nilmdb.utils.iterator
|
|
||||||
import nilmdb.utils.interval
|
|
||||||
import nilmdb.utils.lock
|
|
||||||
import nilmdb.utils.sort
|
|
||||||
|
|
|
@ -2,12 +2,12 @@
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
|
||||||
def replace_file(filename, content):
|
def replace_file(filename, content):
|
||||||
"""Attempt to atomically and durably replace the filename with the
|
"""Attempt to atomically and durably replace the filename with the
|
||||||
given contents"""
|
given contents. This is intended to be 'pretty good on most
|
||||||
|
OSes', but not necessarily bulletproof."""
|
||||||
|
|
||||||
newfilename = filename + b".new"
|
newfilename = filename + ".new"
|
||||||
|
|
||||||
# Write to new file, flush it
|
# Write to new file, flush it
|
||||||
with open(newfilename, "wb") as f:
|
with open(newfilename, "wb") as f:
|
||||||
|
@ -16,4 +16,11 @@ def replace_file(filename, content):
|
||||||
os.fsync(f.fileno())
|
os.fsync(f.fileno())
|
||||||
|
|
||||||
# Move new file over old one
|
# Move new file over old one
|
||||||
os.replace(newfilename, filename)
|
try:
|
||||||
|
os.rename(newfilename, filename)
|
||||||
|
except OSError: # pragma: no cover
|
||||||
|
# Some OSes might not support renaming over an existing file.
|
||||||
|
# This is definitely NOT atomic!
|
||||||
|
os.remove(filename)
|
||||||
|
os.rename(newfilename, filename)
|
||||||
|
|
||||||
|
|
710
nilmdb/utils/datetime_tz/__init__.py
Normal file
710
nilmdb/utils/datetime_tz/__init__.py
Normal file
|
@ -0,0 +1,710 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
#
|
||||||
|
# Copyright 2009 Google Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# Disable the invalid name warning as we are inheriting from a standard library
|
||||||
|
# object.
|
||||||
|
# pylint: disable-msg=C6409,W0212
|
||||||
|
|
||||||
|
"""A version of the datetime module which *cares* about timezones.
|
||||||
|
|
||||||
|
This module will never return a naive datetime object. This requires the module
|
||||||
|
know your local timezone, which it tries really hard to figure out.
|
||||||
|
|
||||||
|
You can override the detection by using the datetime.tzaware.defaulttz_set
|
||||||
|
method. It the module is unable to figure out the timezone itself this method
|
||||||
|
*must* be called before the normal module is imported. If done before importing
|
||||||
|
it can also speed up the time taken to import as the defaulttz will no longer
|
||||||
|
try and do the detection.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__author__ = "tansell@google.com (Tim Ansell)"
|
||||||
|
|
||||||
|
import calendar
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
import warnings
|
||||||
|
import dateutil.parser
|
||||||
|
import dateutil.relativedelta
|
||||||
|
import dateutil.tz
|
||||||
|
import pytz
|
||||||
|
import pytz_abbr
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
# pylint: disable-msg=C6204
|
||||||
|
import functools
|
||||||
|
except ImportError, e:
|
||||||
|
|
||||||
|
class functools(object):
|
||||||
|
"""Fake replacement for a full functools."""
|
||||||
|
|
||||||
|
# pylint: disable-msg=W0613
|
||||||
|
@staticmethod
|
||||||
|
def wraps(f, *args, **kw):
|
||||||
|
return f
|
||||||
|
|
||||||
|
|
||||||
|
# Need to patch pytz.utc to have a _utcoffset so you can normalize/localize
|
||||||
|
# using it.
|
||||||
|
pytz.utc._utcoffset = datetime.timedelta()
|
||||||
|
|
||||||
|
|
||||||
|
timedelta = datetime.timedelta
|
||||||
|
|
||||||
|
|
||||||
|
def _tzinfome(tzinfo):
|
||||||
|
"""Gets a tzinfo object from a string.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tzinfo: A string (or string like) object, or a datetime.tzinfo object.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
An datetime.tzinfo object.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
UnknownTimeZoneError: If the timezone given can't be decoded.
|
||||||
|
"""
|
||||||
|
if not isinstance(tzinfo, datetime.tzinfo):
|
||||||
|
try:
|
||||||
|
tzinfo = pytz.timezone(tzinfo)
|
||||||
|
except AttributeError:
|
||||||
|
raise pytz.UnknownTimeZoneError("Unknown timezone! %s" % tzinfo)
|
||||||
|
return tzinfo
|
||||||
|
|
||||||
|
|
||||||
|
# Our "local" timezone
|
||||||
|
_localtz = None
|
||||||
|
|
||||||
|
|
||||||
|
def localtz():
|
||||||
|
"""Get the local timezone.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The localtime timezone as a tzinfo object.
|
||||||
|
"""
|
||||||
|
# pylint: disable-msg=W0603
|
||||||
|
global _localtz
|
||||||
|
if _localtz is None:
|
||||||
|
_localtz = detect_timezone()
|
||||||
|
return _localtz
|
||||||
|
|
||||||
|
|
||||||
|
def localtz_set(timezone):
|
||||||
|
"""Set the local timezone."""
|
||||||
|
# pylint: disable-msg=W0603
|
||||||
|
global _localtz
|
||||||
|
_localtz = _tzinfome(timezone)
|
||||||
|
|
||||||
|
|
||||||
|
def detect_timezone():
|
||||||
|
"""Try and detect the timezone that Python is currently running in.
|
||||||
|
|
||||||
|
We have a bunch of different methods for trying to figure this out (listed in
|
||||||
|
order they are attempted).
|
||||||
|
* Try TZ environment variable.
|
||||||
|
* Try and find /etc/timezone file (with timezone name).
|
||||||
|
* Try and find /etc/localtime file (with timezone data).
|
||||||
|
* Try and match a TZ to the current dst/offset/shortname.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The detected local timezone as a tzinfo object
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
pytz.UnknownTimeZoneError: If it was unable to detect a timezone.
|
||||||
|
"""
|
||||||
|
# First we try the TZ variable
|
||||||
|
tz = _detect_timezone_environ()
|
||||||
|
if tz is not None:
|
||||||
|
return tz
|
||||||
|
|
||||||
|
# Second we try /etc/timezone and use the value in that
|
||||||
|
tz = _detect_timezone_etc_timezone()
|
||||||
|
if tz is not None:
|
||||||
|
return tz
|
||||||
|
|
||||||
|
# Next we try and see if something matches the tzinfo in /etc/localtime
|
||||||
|
tz = _detect_timezone_etc_localtime()
|
||||||
|
if tz is not None:
|
||||||
|
return tz
|
||||||
|
|
||||||
|
# Next we try and use a similiar method to what PHP does.
|
||||||
|
# We first try to search on time.tzname, time.timezone, time.daylight to
|
||||||
|
# match a pytz zone.
|
||||||
|
warnings.warn("Had to fall back to worst detection method (the 'PHP' "
|
||||||
|
"method).")
|
||||||
|
|
||||||
|
tz = _detect_timezone_php()
|
||||||
|
if tz is not None:
|
||||||
|
return tz
|
||||||
|
|
||||||
|
raise pytz.UnknownTimeZoneError("Unable to detect your timezone!")
|
||||||
|
|
||||||
|
|
||||||
|
def _detect_timezone_environ():
|
||||||
|
if "TZ" in os.environ:
|
||||||
|
try:
|
||||||
|
return pytz.timezone(os.environ["TZ"])
|
||||||
|
except (IOError, pytz.UnknownTimeZoneError):
|
||||||
|
warnings.warn("You provided a TZ environment value (%r) we did not "
|
||||||
|
"understand!" % os.environ["TZ"])
|
||||||
|
|
||||||
|
|
||||||
|
def _detect_timezone_etc_timezone():
|
||||||
|
if os.path.exists("/etc/timezone"):
|
||||||
|
try:
|
||||||
|
tz = file("/etc/timezone").read().strip()
|
||||||
|
try:
|
||||||
|
return pytz.timezone(tz)
|
||||||
|
except (IOError, pytz.UnknownTimeZoneError), ei:
|
||||||
|
warnings.warn("Your /etc/timezone file references a timezone (%r) that"
|
||||||
|
" is not valid (%r)." % (tz, ei))
|
||||||
|
|
||||||
|
# Problem reading the /etc/timezone file
|
||||||
|
except IOError, eo:
|
||||||
|
warnings.warn("Could not access your /etc/timezone file: %s" % eo)
|
||||||
|
|
||||||
|
|
||||||
|
def _detect_timezone_etc_localtime():
|
||||||
|
matches = []
|
||||||
|
if os.path.exists("/etc/localtime"):
|
||||||
|
localtime = pytz.tzfile.build_tzinfo("/etc/localtime",
|
||||||
|
file("/etc/localtime"))
|
||||||
|
|
||||||
|
# See if we can find a "Human Name" for this..
|
||||||
|
for tzname in pytz.all_timezones:
|
||||||
|
tz = _tzinfome(tzname)
|
||||||
|
|
||||||
|
if dir(tz) != dir(localtime):
|
||||||
|
continue
|
||||||
|
|
||||||
|
for attrib in dir(tz):
|
||||||
|
# Ignore functions and specials
|
||||||
|
if callable(getattr(tz, attrib)) or attrib.startswith("__"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# This will always be different
|
||||||
|
if attrib == "zone" or attrib == "_tzinfos":
|
||||||
|
continue
|
||||||
|
|
||||||
|
if getattr(tz, attrib) != getattr(localtime, attrib):
|
||||||
|
break
|
||||||
|
|
||||||
|
# We get here iff break didn't happen, i.e. no meaningful attributes
|
||||||
|
# differ between tz and localtime
|
||||||
|
else:
|
||||||
|
matches.append(tzname)
|
||||||
|
|
||||||
|
if len(matches) == 1:
|
||||||
|
return _tzinfome(matches[0])
|
||||||
|
else:
|
||||||
|
# Warn the person about this!
|
||||||
|
warning = "Could not get a human name for your timezone: "
|
||||||
|
if len(matches) > 1:
|
||||||
|
warning += ("We detected multiple matches for your /etc/localtime. "
|
||||||
|
"(Matches where %s)" % matches)
|
||||||
|
return _tzinfome(matches[0])
|
||||||
|
else:
|
||||||
|
warning += "We detected no matches for your /etc/localtime."
|
||||||
|
warnings.warn(warning)
|
||||||
|
|
||||||
|
# Register /etc/localtime as the timezone loaded.
|
||||||
|
pytz._tzinfo_cache['/etc/localtime'] = localtime
|
||||||
|
return localtime
|
||||||
|
|
||||||
|
|
||||||
|
def _detect_timezone_php():
|
||||||
|
tomatch = (time.tzname[0], time.timezone, time.daylight)
|
||||||
|
now = datetime.datetime.now()
|
||||||
|
|
||||||
|
matches = []
|
||||||
|
for tzname in pytz.all_timezones:
|
||||||
|
try:
|
||||||
|
tz = pytz.timezone(tzname)
|
||||||
|
except IOError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
indst = tz.localize(now).timetuple()[-1]
|
||||||
|
|
||||||
|
if tomatch == (tz._tzname, -tz._utcoffset.seconds, indst):
|
||||||
|
matches.append(tzname)
|
||||||
|
|
||||||
|
# pylint: disable-msg=W0704
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if len(matches) > 1:
|
||||||
|
warnings.warn("We detected multiple matches for the timezone, choosing "
|
||||||
|
"the first %s. (Matches where %s)" % (matches[0], matches))
|
||||||
|
return pytz.timezone(matches[0])
|
||||||
|
|
||||||
|
|
||||||
|
class datetime_tz(datetime.datetime):
|
||||||
|
"""An extension of the inbuilt datetime adding more functionality.
|
||||||
|
|
||||||
|
The extra functionality includes:
|
||||||
|
* Partial parsing support (IE 2006/02/30 matches %Y/%M/%D %H:%M)
|
||||||
|
* Full integration with pytz (just give it the string of the timezone!)
|
||||||
|
* Proper support for going to/from Unix timestamps (which are in UTC!).
|
||||||
|
"""
|
||||||
|
__slots__ = ["is_dst"]
|
||||||
|
|
||||||
|
def __new__(cls, *args, **kw):
|
||||||
|
args = list(args)
|
||||||
|
if not args:
|
||||||
|
raise TypeError("Not enough arguments given.")
|
||||||
|
|
||||||
|
# See if we are given a tzinfo object...
|
||||||
|
tzinfo = None
|
||||||
|
if isinstance(args[-1], (datetime.tzinfo, basestring)):
|
||||||
|
tzinfo = _tzinfome(args.pop(-1))
|
||||||
|
elif kw.get("tzinfo", None) is not None:
|
||||||
|
tzinfo = _tzinfome(kw.pop("tzinfo"))
|
||||||
|
|
||||||
|
# Create a datetime object if we don't have one
|
||||||
|
if isinstance(args[0], datetime.datetime):
|
||||||
|
# Convert the datetime instance to a datetime object.
|
||||||
|
newargs = (list(args[0].timetuple()[0:6]) +
|
||||||
|
[args[0].microsecond, args[0].tzinfo])
|
||||||
|
dt = datetime.datetime(*newargs)
|
||||||
|
|
||||||
|
if tzinfo is None and dt.tzinfo is None:
|
||||||
|
raise TypeError("Must specify a timezone!")
|
||||||
|
|
||||||
|
if tzinfo is not None and dt.tzinfo is not None:
|
||||||
|
raise TypeError("Can not give a timezone with timezone aware"
|
||||||
|
" datetime object! (Use localize.)")
|
||||||
|
else:
|
||||||
|
dt = datetime.datetime(*args, **kw)
|
||||||
|
|
||||||
|
if dt.tzinfo is not None:
|
||||||
|
# Re-normalize the dt object
|
||||||
|
dt = dt.tzinfo.normalize(dt)
|
||||||
|
|
||||||
|
else:
|
||||||
|
if tzinfo is None:
|
||||||
|
tzinfo = localtz()
|
||||||
|
|
||||||
|
try:
|
||||||
|
dt = tzinfo.localize(dt, is_dst=None)
|
||||||
|
except pytz.AmbiguousTimeError:
|
||||||
|
is_dst = None
|
||||||
|
if "is_dst" in kw:
|
||||||
|
is_dst = kw.pop("is_dst")
|
||||||
|
|
||||||
|
try:
|
||||||
|
dt = tzinfo.localize(dt, is_dst)
|
||||||
|
except IndexError:
|
||||||
|
raise pytz.AmbiguousTimeError("No such time exists!")
|
||||||
|
|
||||||
|
newargs = list(dt.timetuple()[0:6])+[dt.microsecond, dt.tzinfo]
|
||||||
|
obj = datetime.datetime.__new__(cls, *newargs)
|
||||||
|
obj.is_dst = obj.dst() != datetime.timedelta(0)
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def asdatetime(self, naive=True):
|
||||||
|
"""Return this datetime_tz as a datetime object.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
naive: Return *without* any tz info.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
This datetime_tz as a datetime object.
|
||||||
|
"""
|
||||||
|
args = list(self.timetuple()[0:6])+[self.microsecond]
|
||||||
|
if not naive:
|
||||||
|
args.append(self.tzinfo)
|
||||||
|
return datetime.datetime(*args)
|
||||||
|
|
||||||
|
def asdate(self):
|
||||||
|
"""Return this datetime_tz as a date object.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
This datetime_tz as a date object.
|
||||||
|
"""
|
||||||
|
return datetime.date(self.year, self.month, self.day)
|
||||||
|
|
||||||
|
def totimestamp(self):
|
||||||
|
"""Convert this datetime object back to a unix timestamp.
|
||||||
|
|
||||||
|
The Unix epoch is the time 00:00:00 UTC on January 1, 1970.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Unix timestamp.
|
||||||
|
"""
|
||||||
|
return calendar.timegm(self.utctimetuple())+1e-6*self.microsecond
|
||||||
|
|
||||||
|
def astimezone(self, tzinfo):
|
||||||
|
"""Returns a version of this timestamp converted to the given timezone.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tzinfo: Either a datetime.tzinfo object or a string (which will be looked
|
||||||
|
up in pytz.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A datetime_tz object in the given timezone.
|
||||||
|
"""
|
||||||
|
# Assert we are not a naive datetime object
|
||||||
|
assert self.tzinfo is not None
|
||||||
|
|
||||||
|
tzinfo = _tzinfome(tzinfo)
|
||||||
|
|
||||||
|
d = self.asdatetime(naive=False).astimezone(tzinfo)
|
||||||
|
return datetime_tz(d)
|
||||||
|
|
||||||
|
# pylint: disable-msg=C6113
|
||||||
|
def replace(self, **kw):
|
||||||
|
"""Return datetime with new specified fields given as arguments.
|
||||||
|
|
||||||
|
For example, dt.replace(days=4) would return a new datetime_tz object with
|
||||||
|
exactly the same as dt but with the days attribute equal to 4.
|
||||||
|
|
||||||
|
Any attribute can be replaced, but tzinfo can not be set to None.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
Any datetime_tz attribute.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A datetime_tz object with the attributes replaced.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
TypeError: If the given replacement is invalid.
|
||||||
|
"""
|
||||||
|
if "tzinfo" in kw:
|
||||||
|
if kw["tzinfo"] is None:
|
||||||
|
raise TypeError("Can not remove the timezone use asdatetime()")
|
||||||
|
|
||||||
|
is_dst = None
|
||||||
|
if "is_dst" in kw:
|
||||||
|
is_dst = kw["is_dst"]
|
||||||
|
del kw["is_dst"]
|
||||||
|
else:
|
||||||
|
# Use our own DST setting..
|
||||||
|
is_dst = self.is_dst
|
||||||
|
|
||||||
|
replaced = self.asdatetime().replace(**kw)
|
||||||
|
|
||||||
|
return datetime_tz(replaced, tzinfo=self.tzinfo.zone, is_dst=is_dst)
|
||||||
|
|
||||||
|
# pylint: disable-msg=C6310
|
||||||
|
@classmethod
|
||||||
|
def smartparse(cls, toparse, tzinfo=None):
|
||||||
|
"""Method which uses dateutil.parse and extras to try and parse the string.
|
||||||
|
|
||||||
|
Valid dates are found at:
|
||||||
|
http://labix.org/python-dateutil#head-1443e0f14ad5dff07efd465e080d1110920673d8-2
|
||||||
|
|
||||||
|
Other valid formats include:
|
||||||
|
"now" or "today"
|
||||||
|
"yesterday"
|
||||||
|
"tommorrow"
|
||||||
|
"5 minutes ago"
|
||||||
|
"10 hours ago"
|
||||||
|
"10h5m ago"
|
||||||
|
"start of yesterday"
|
||||||
|
"end of tommorrow"
|
||||||
|
"end of 3rd of March"
|
||||||
|
|
||||||
|
Args:
|
||||||
|
toparse: The string to parse.
|
||||||
|
tzinfo: Timezone for the resultant datetime_tz object should be in.
|
||||||
|
(Defaults to your local timezone.)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
New datetime_tz object.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If unable to make sense of the input.
|
||||||
|
"""
|
||||||
|
# Default for empty fields are:
|
||||||
|
# year/month/day == now
|
||||||
|
# hour/minute/second/microsecond == 0
|
||||||
|
toparse = toparse.strip()
|
||||||
|
|
||||||
|
if tzinfo is None:
|
||||||
|
dt = cls.now()
|
||||||
|
else:
|
||||||
|
dt = cls.now(tzinfo)
|
||||||
|
|
||||||
|
default = dt.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||||
|
|
||||||
|
# Remove "start of " and "end of " prefix in the string
|
||||||
|
if toparse.lower().startswith("end of "):
|
||||||
|
toparse = toparse[7:].strip()
|
||||||
|
|
||||||
|
dt += datetime.timedelta(days=1)
|
||||||
|
dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||||
|
dt -= datetime.timedelta(microseconds=1)
|
||||||
|
|
||||||
|
default = dt
|
||||||
|
|
||||||
|
elif toparse.lower().startswith("start of "):
|
||||||
|
toparse = toparse[9:].strip()
|
||||||
|
|
||||||
|
dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||||
|
default = dt
|
||||||
|
|
||||||
|
# Handle strings with "now", "today", "yesterday", "tomorrow" and "ago".
|
||||||
|
# Need to use lowercase
|
||||||
|
toparselower = toparse.lower()
|
||||||
|
|
||||||
|
if toparselower in ["now", "today"]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
elif toparselower == "yesterday":
|
||||||
|
dt -= datetime.timedelta(days=1)
|
||||||
|
|
||||||
|
elif toparselower == "tommorrow":
|
||||||
|
dt += datetime.timedelta(days=1)
|
||||||
|
|
||||||
|
elif "ago" in toparselower:
|
||||||
|
# Remove the "ago" bit
|
||||||
|
toparselower = toparselower[:-3]
|
||||||
|
# Replace all "a day and an hour" with "1 day 1 hour"
|
||||||
|
toparselower = toparselower.replace("a ", "1 ")
|
||||||
|
toparselower = toparselower.replace("an ", "1 ")
|
||||||
|
toparselower = toparselower.replace(" and ", " ")
|
||||||
|
|
||||||
|
# Match the following
|
||||||
|
# 1 hour ago
|
||||||
|
# 1h ago
|
||||||
|
# 1 h ago
|
||||||
|
# 1 hour ago
|
||||||
|
# 2 hours ago
|
||||||
|
# Same with minutes, seconds, etc.
|
||||||
|
|
||||||
|
tocheck = ("seconds", "minutes", "hours", "days", "weeks", "months",
|
||||||
|
"years")
|
||||||
|
result = {}
|
||||||
|
for match in re.finditer("([0-9]+)([^0-9]*)", toparselower):
|
||||||
|
amount = int(match.group(1))
|
||||||
|
unit = match.group(2).strip()
|
||||||
|
|
||||||
|
for bit in tocheck:
|
||||||
|
regex = "^([%s]|((%s)s?))$" % (
|
||||||
|
bit[0], bit[:-1])
|
||||||
|
|
||||||
|
bitmatch = re.search(regex, unit)
|
||||||
|
if bitmatch:
|
||||||
|
result[bit] = amount
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
raise ValueError("Was not able to parse date unit %r!" % unit)
|
||||||
|
|
||||||
|
delta = dateutil.relativedelta.relativedelta(**result)
|
||||||
|
dt -= delta
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Handle strings with normal datetime format, use original case.
|
||||||
|
dt = dateutil.parser.parse(toparse, default=default.asdatetime(),
|
||||||
|
tzinfos=pytz_abbr.tzinfos)
|
||||||
|
if dt is None:
|
||||||
|
raise ValueError("Was not able to parse date!")
|
||||||
|
|
||||||
|
if dt.tzinfo is pytz_abbr.unknown:
|
||||||
|
dt = dt.replace(tzinfo=None)
|
||||||
|
|
||||||
|
if dt.tzinfo is None:
|
||||||
|
if tzinfo is None:
|
||||||
|
tzinfo = localtz()
|
||||||
|
dt = cls(dt, tzinfo)
|
||||||
|
else:
|
||||||
|
if isinstance(dt.tzinfo, pytz_abbr.tzabbr):
|
||||||
|
abbr = dt.tzinfo
|
||||||
|
dt = dt.replace(tzinfo=None)
|
||||||
|
dt = cls(dt, abbr.zone, is_dst=abbr.dst)
|
||||||
|
|
||||||
|
dt = cls(dt)
|
||||||
|
|
||||||
|
return dt
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def utcfromtimestamp(cls, timestamp):
|
||||||
|
"""Returns a datetime object of a given timestamp (in UTC)."""
|
||||||
|
obj = datetime.datetime.utcfromtimestamp(timestamp)
|
||||||
|
obj = pytz.utc.localize(obj)
|
||||||
|
return cls(obj)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fromtimestamp(cls, timestamp):
|
||||||
|
"""Returns a datetime object of a given timestamp (in local tz)."""
|
||||||
|
d = cls.utcfromtimestamp(timestamp)
|
||||||
|
return d.astimezone(localtz())
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def utcnow(cls):
|
||||||
|
"""Return a new datetime representing UTC day and time."""
|
||||||
|
obj = datetime.datetime.utcnow()
|
||||||
|
obj = cls(obj, tzinfo=pytz.utc)
|
||||||
|
return obj
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def now(cls, tzinfo=None):
|
||||||
|
"""[tz] -> new datetime with tz's local day and time."""
|
||||||
|
obj = cls.utcnow()
|
||||||
|
if tzinfo is None:
|
||||||
|
tzinfo = localtz()
|
||||||
|
return obj.astimezone(tzinfo)
|
||||||
|
|
||||||
|
today = now
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def fromordinal(ordinal):
|
||||||
|
raise SyntaxError("Not enough information to create a datetime_tz object "
|
||||||
|
"from an ordinal. Please use datetime.date.fromordinal")
|
||||||
|
|
||||||
|
|
||||||
|
class iterate(object):
|
||||||
|
"""Helpful iterators for working with datetime_tz objects."""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def between(start, delta, end=None):
|
||||||
|
"""Return an iterator between this date till given end point.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
>>> d = datetime_tz.smartparse("5 days ago")
|
||||||
|
2008/05/12 11:45
|
||||||
|
>>> for i in d.between(timedelta(days=1), datetime_tz.now()):
|
||||||
|
>>> print i
|
||||||
|
2008/05/12 11:45
|
||||||
|
2008/05/13 11:45
|
||||||
|
2008/05/14 11:45
|
||||||
|
2008/05/15 11:45
|
||||||
|
2008/05/16 11:45
|
||||||
|
|
||||||
|
Args:
|
||||||
|
start: The date to start at.
|
||||||
|
delta: The interval to iterate with.
|
||||||
|
end: (Optional) Date to end at. If not given the iterator will never
|
||||||
|
terminate.
|
||||||
|
|
||||||
|
Yields:
|
||||||
|
datetime_tz objects.
|
||||||
|
"""
|
||||||
|
toyield = start
|
||||||
|
while end is None or toyield < end:
|
||||||
|
yield toyield
|
||||||
|
toyield += delta
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def weeks(start, end=None):
|
||||||
|
"""Iterate over the weeks between the given datetime_tzs.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
start: datetime_tz to start from.
|
||||||
|
end: (Optional) Date to end at, if not given the iterator will never
|
||||||
|
terminate.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
An iterator which generates datetime_tz objects a week apart.
|
||||||
|
"""
|
||||||
|
return iterate.between(start, datetime.timedelta(days=7), end)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def days(start, end=None):
|
||||||
|
"""Iterate over the days between the given datetime_tzs.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
start: datetime_tz to start from.
|
||||||
|
end: (Optional) Date to end at, if not given the iterator will never
|
||||||
|
terminate.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
An iterator which generates datetime_tz objects a day apart.
|
||||||
|
"""
|
||||||
|
return iterate.between(start, datetime.timedelta(days=1), end)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def hours(start, end=None):
|
||||||
|
"""Iterate over the hours between the given datetime_tzs.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
start: datetime_tz to start from.
|
||||||
|
end: (Optional) Date to end at, if not given the iterator will never
|
||||||
|
terminate.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
An iterator which generates datetime_tz objects a hour apart.
|
||||||
|
"""
|
||||||
|
return iterate.between(start, datetime.timedelta(hours=1), end)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def minutes(start, end=None):
|
||||||
|
"""Iterate over the minutes between the given datetime_tzs.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
start: datetime_tz to start from.
|
||||||
|
end: (Optional) Date to end at, if not given the iterator will never
|
||||||
|
terminate.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
An iterator which generates datetime_tz objects a minute apart.
|
||||||
|
"""
|
||||||
|
return iterate.between(start, datetime.timedelta(minutes=1), end)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def seconds(start, end=None):
|
||||||
|
"""Iterate over the seconds between the given datetime_tzs.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
start: datetime_tz to start from.
|
||||||
|
end: (Optional) Date to end at, if not given the iterator will never
|
||||||
|
terminate.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
An iterator which generates datetime_tz objects a second apart.
|
||||||
|
"""
|
||||||
|
return iterate.between(start, datetime.timedelta(minutes=1), end)
|
||||||
|
|
||||||
|
|
||||||
|
def _wrap_method(name):
|
||||||
|
"""Wrap a method.
|
||||||
|
|
||||||
|
Patch a method which might return a datetime.datetime to return a
|
||||||
|
datetime_tz.datetime_tz instead.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: The name of the method to patch
|
||||||
|
"""
|
||||||
|
method = getattr(datetime.datetime, name)
|
||||||
|
|
||||||
|
# Have to give the second argument as method has no __module__ option.
|
||||||
|
@functools.wraps(method, ("__name__", "__doc__"), ())
|
||||||
|
def wrapper(*args, **kw):
|
||||||
|
r = method(*args, **kw)
|
||||||
|
|
||||||
|
if isinstance(r, datetime.datetime) and not isinstance(r, datetime_tz):
|
||||||
|
r = datetime_tz(r)
|
||||||
|
return r
|
||||||
|
|
||||||
|
setattr(datetime_tz, name, wrapper)
|
||||||
|
|
||||||
|
for methodname in ["__add__", "__radd__", "__rsub__", "__sub__", "combine"]:
|
||||||
|
|
||||||
|
# Make sure we have not already got an override for this method
|
||||||
|
assert methodname not in datetime_tz.__dict__
|
||||||
|
|
||||||
|
_wrap_method(methodname)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['datetime_tz', 'detect_timezone', 'iterate', 'localtz',
|
||||||
|
'localtz_set', 'timedelta', '_detect_timezone_environ',
|
||||||
|
'_detect_timezone_etc_localtime', '_detect_timezone_etc_timezone',
|
||||||
|
'_detect_timezone_php']
|
230
nilmdb/utils/datetime_tz/pytz_abbr.py
Normal file
230
nilmdb/utils/datetime_tz/pytz_abbr.py
Normal file
|
@ -0,0 +1,230 @@
|
||||||
|
#!/usr/bin/python2.4
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright 2010 Google Inc. All Rights Reserved.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Common time zone acronyms/abbreviations for use with the datetime_tz module.
|
||||||
|
|
||||||
|
*WARNING*: There are lots of caveats when using this module which are listed
|
||||||
|
below.
|
||||||
|
|
||||||
|
CAVEAT 1: The acronyms/abbreviations are not globally unique, they are not even
|
||||||
|
unique within a region. For example, EST can mean any of,
|
||||||
|
Eastern Standard Time in Australia (which is 10 hour ahead of UTC)
|
||||||
|
Eastern Standard Time in North America (which is 5 hours behind UTC)
|
||||||
|
|
||||||
|
Where there are two abbreviations the more popular one will appear in the all
|
||||||
|
dictionary, while the less common one will only appear in that countries region
|
||||||
|
dictionary. IE If using all, EST will be mapped to Eastern Standard Time in
|
||||||
|
North America.
|
||||||
|
|
||||||
|
CAVEAT 2: Many of the acronyms don't map to a neat Oslon timezones. For example,
|
||||||
|
Eastern European Summer Time (EEDT) is used by many different countries in
|
||||||
|
Europe *at different times*! If the acronym does not map neatly to one zone it
|
||||||
|
is mapped to the Etc/GMT+-XX Oslon zone. This means that any date manipulations
|
||||||
|
can end up with idiot things like summer time in the middle of winter.
|
||||||
|
|
||||||
|
CAVEAT 3: The Summer/Standard time difference is really important! For an hour
|
||||||
|
each year it is needed to determine which time you are actually talking about.
|
||||||
|
2002-10-27 01:20:00 EST != 2002-10-27 01:20:00 EDT
|
||||||
|
"""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import pytz
|
||||||
|
import pytz.tzfile
|
||||||
|
|
||||||
|
|
||||||
|
class tzabbr(datetime.tzinfo):
|
||||||
|
"""A timezone abbreviation.
|
||||||
|
|
||||||
|
*WARNING*: This is not a tzinfo implementation! Trying to use this as tzinfo
|
||||||
|
object will result in failure. We inherit from datetime.tzinfo so we can get
|
||||||
|
through the dateutil checks.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
# A "marker" tzinfo object which is used to signify an unknown timezone.
|
||||||
|
unknown = datetime.tzinfo(0)
|
||||||
|
|
||||||
|
|
||||||
|
regions = {'all': {}, 'military': {}}
|
||||||
|
# Create a special alias for the all and military regions
|
||||||
|
all = regions['all']
|
||||||
|
military = regions['military']
|
||||||
|
|
||||||
|
|
||||||
|
def tzabbr_register(abbr, name, region, zone, dst):
|
||||||
|
"""Register a new timezone abbreviation in the global registry.
|
||||||
|
|
||||||
|
If another abbreviation with the same name has already been registered it new
|
||||||
|
abbreviation will only be registered in region specific dictionary.
|
||||||
|
"""
|
||||||
|
newabbr = tzabbr()
|
||||||
|
newabbr.abbr = abbr
|
||||||
|
newabbr.name = name
|
||||||
|
newabbr.region = region
|
||||||
|
newabbr.zone = zone
|
||||||
|
newabbr.dst = dst
|
||||||
|
|
||||||
|
if abbr not in all:
|
||||||
|
all[abbr] = newabbr
|
||||||
|
|
||||||
|
if not region in regions:
|
||||||
|
regions[region] = {}
|
||||||
|
|
||||||
|
assert abbr not in regions[region]
|
||||||
|
regions[region][abbr] = newabbr
|
||||||
|
|
||||||
|
|
||||||
|
def tzinfos_create(use_region):
|
||||||
|
abbrs = regions[use_region]
|
||||||
|
|
||||||
|
def tzinfos(abbr, offset):
|
||||||
|
if abbr:
|
||||||
|
if abbr in abbrs:
|
||||||
|
result = abbrs[abbr]
|
||||||
|
if offset:
|
||||||
|
# FIXME: Check the offset matches the abbreviation we just selected.
|
||||||
|
pass
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
raise ValueError, "Unknown timezone found %s" % abbr
|
||||||
|
if offset == 0:
|
||||||
|
return pytz.utc
|
||||||
|
if offset:
|
||||||
|
return pytz.FixedOffset(offset/60)
|
||||||
|
return unknown
|
||||||
|
|
||||||
|
return tzinfos
|
||||||
|
|
||||||
|
|
||||||
|
# Create a special alias for the all tzinfos
|
||||||
|
tzinfos = tzinfos_create('all')
|
||||||
|
|
||||||
|
|
||||||
|
# Create the abbreviations.
|
||||||
|
# *WARNING*: Order matters!
|
||||||
|
tzabbr_register("A", u"Alpha Time Zone", u"Military", "Etc/GMT-1", False)
|
||||||
|
tzabbr_register("ACDT", u"Australian Central Daylight Time", u"Australia",
|
||||||
|
"Australia/Adelaide", True)
|
||||||
|
tzabbr_register("ACST", u"Australian Central Standard Time", u"Australia",
|
||||||
|
"Australia/Adelaide", False)
|
||||||
|
tzabbr_register("ADT", u"Atlantic Daylight Time", u"North America",
|
||||||
|
"America/Halifax", True)
|
||||||
|
tzabbr_register("AEDT", u"Australian Eastern Daylight Time", u"Australia",
|
||||||
|
"Australia/Sydney", True)
|
||||||
|
tzabbr_register("AEST", u"Australian Eastern Standard Time", u"Australia",
|
||||||
|
"Australia/Sydney", False)
|
||||||
|
tzabbr_register("AKDT", u"Alaska Daylight Time", u"North America",
|
||||||
|
"US/Alaska", True)
|
||||||
|
tzabbr_register("AKST", u"Alaska Standard Time", u"North America",
|
||||||
|
"US/Alaska", False)
|
||||||
|
tzabbr_register("AST", u"Atlantic Standard Time", u"North America",
|
||||||
|
"America/Halifax", False)
|
||||||
|
tzabbr_register("AWDT", u"Australian Western Daylight Time", u"Australia",
|
||||||
|
"Australia/West", True)
|
||||||
|
tzabbr_register("AWST", u"Australian Western Standard Time", u"Australia",
|
||||||
|
"Australia/West", False)
|
||||||
|
tzabbr_register("B", u"Bravo Time Zone", u"Military", "Etc/GMT-2", False)
|
||||||
|
tzabbr_register("BST", u"British Summer Time", u"Europe", "Europe/London", True)
|
||||||
|
tzabbr_register("C", u"Charlie Time Zone", u"Military", "Etc/GMT-2", False)
|
||||||
|
tzabbr_register("CDT", u"Central Daylight Time", u"North America",
|
||||||
|
"US/Central", True)
|
||||||
|
tzabbr_register("CEDT", u"Central European Daylight Time", u"Europe",
|
||||||
|
"Etc/GMT+2", True)
|
||||||
|
tzabbr_register("CEST", u"Central European Summer Time", u"Europe",
|
||||||
|
"Etc/GMT+2", True)
|
||||||
|
tzabbr_register("CET", u"Central European Time", u"Europe", "Etc/GMT+1", False)
|
||||||
|
tzabbr_register("CST", u"Central Standard Time", u"North America",
|
||||||
|
"US/Central", False)
|
||||||
|
tzabbr_register("CXT", u"Christmas Island Time", u"Australia",
|
||||||
|
"Indian/Christmas", False)
|
||||||
|
tzabbr_register("D", u"Delta Time Zone", u"Military", "Etc/GMT-2", False)
|
||||||
|
tzabbr_register("E", u"Echo Time Zone", u"Military", "Etc/GMT-2", False)
|
||||||
|
tzabbr_register("EDT", u"Eastern Daylight Time", u"North America",
|
||||||
|
"US/Eastern", True)
|
||||||
|
tzabbr_register("EEDT", u"Eastern European Daylight Time", u"Europe",
|
||||||
|
"Etc/GMT+3", True)
|
||||||
|
tzabbr_register("EEST", u"Eastern European Summer Time", u"Europe",
|
||||||
|
"Etc/GMT+3", True)
|
||||||
|
tzabbr_register("EET", u"Eastern European Time", u"Europe", "Etc/GMT+2", False)
|
||||||
|
tzabbr_register("EST", u"Eastern Standard Time", u"North America",
|
||||||
|
"US/Eastern", False)
|
||||||
|
tzabbr_register("F", u"Foxtrot Time Zone", u"Military", "Etc/GMT-6", False)
|
||||||
|
tzabbr_register("G", u"Golf Time Zone", u"Military", "Etc/GMT-7", False)
|
||||||
|
tzabbr_register("GMT", u"Greenwich Mean Time", u"Europe", pytz.utc, False)
|
||||||
|
tzabbr_register("H", u"Hotel Time Zone", u"Military", "Etc/GMT-8", False)
|
||||||
|
#tzabbr_register("HAA", u"Heure Avancée de l'Atlantique", u"North America", u"UTC - 3 hours")
|
||||||
|
#tzabbr_register("HAC", u"Heure Avancée du Centre", u"North America", u"UTC - 5 hours")
|
||||||
|
tzabbr_register("HADT", u"Hawaii-Aleutian Daylight Time", u"North America",
|
||||||
|
"Pacific/Honolulu", True)
|
||||||
|
#tzabbr_register("HAE", u"Heure Avancée de l'Est", u"North America", u"UTC - 4 hours")
|
||||||
|
#tzabbr_register("HAP", u"Heure Avancée du Pacifique", u"North America", u"UTC - 7 hours")
|
||||||
|
#tzabbr_register("HAR", u"Heure Avancée des Rocheuses", u"North America", u"UTC - 6 hours")
|
||||||
|
tzabbr_register("HAST", u"Hawaii-Aleutian Standard Time", u"North America",
|
||||||
|
"Pacific/Honolulu", False)
|
||||||
|
#tzabbr_register("HAT", u"Heure Avancée de Terre-Neuve", u"North America", u"UTC - 2:30 hours")
|
||||||
|
#tzabbr_register("HAY", u"Heure Avancée du Yukon", u"North America", u"UTC - 8 hours")
|
||||||
|
tzabbr_register("HDT", u"Hawaii Daylight Time", u"North America",
|
||||||
|
"Pacific/Honolulu", True)
|
||||||
|
#tzabbr_register("HNA", u"Heure Normale de l'Atlantique", u"North America", u"UTC - 4 hours")
|
||||||
|
#tzabbr_register("HNC", u"Heure Normale du Centre", u"North America", u"UTC - 6 hours")
|
||||||
|
#tzabbr_register("HNE", u"Heure Normale de l'Est", u"North America", u"UTC - 5 hours")
|
||||||
|
#tzabbr_register("HNP", u"Heure Normale du Pacifique", u"North America", u"UTC - 8 hours")
|
||||||
|
#tzabbr_register("HNR", u"Heure Normale des Rocheuses", u"North America", u"UTC - 7 hours")
|
||||||
|
#tzabbr_register("HNT", u"Heure Normale de Terre-Neuve", u"North America", u"UTC - 3:30 hours")
|
||||||
|
#tzabbr_register("HNY", u"Heure Normale du Yukon", u"North America", u"UTC - 9 hours")
|
||||||
|
tzabbr_register("HST", u"Hawaii Standard Time", u"North America",
|
||||||
|
"Pacific/Honolulu", False)
|
||||||
|
tzabbr_register("I", u"India Time Zone", u"Military", "Etc/GMT-9", False)
|
||||||
|
tzabbr_register("IST", u"Irish Summer Time", u"Europe", "Europe/Dublin", True)
|
||||||
|
tzabbr_register("K", u"Kilo Time Zone", u"Military", "Etc/GMT-10", False)
|
||||||
|
tzabbr_register("L", u"Lima Time Zone", u"Military", "Etc/GMT-11", False)
|
||||||
|
tzabbr_register("M", u"Mike Time Zone", u"Military", "Etc/GMT-12", False)
|
||||||
|
tzabbr_register("MDT", u"Mountain Daylight Time", u"North America",
|
||||||
|
"US/Mountain", True)
|
||||||
|
#tzabbr_register("MESZ", u"Mitteleuroäische Sommerzeit", u"Europe", u"UTC + 2 hours")
|
||||||
|
#tzabbr_register("MEZ", u"Mitteleuropäische Zeit", u"Europe", u"UTC + 1 hour")
|
||||||
|
tzabbr_register("MSD", u"Moscow Daylight Time", u"Europe",
|
||||||
|
"Europe/Moscow", True)
|
||||||
|
tzabbr_register("MSK", u"Moscow Standard Time", u"Europe",
|
||||||
|
"Europe/Moscow", False)
|
||||||
|
tzabbr_register("MST", u"Mountain Standard Time", u"North America",
|
||||||
|
"US/Mountain", False)
|
||||||
|
tzabbr_register("N", u"November Time Zone", u"Military", "Etc/GMT+1", False)
|
||||||
|
tzabbr_register("NDT", u"Newfoundland Daylight Time", u"North America",
|
||||||
|
"America/St_Johns", True)
|
||||||
|
tzabbr_register("NFT", u"Norfolk (Island) Time", u"Australia",
|
||||||
|
"Pacific/Norfolk", False)
|
||||||
|
tzabbr_register("NST", u"Newfoundland Standard Time", u"North America",
|
||||||
|
"America/St_Johns", False)
|
||||||
|
tzabbr_register("O", u"Oscar Time Zone", u"Military", "Etc/GMT+2", False)
|
||||||
|
tzabbr_register("P", u"Papa Time Zone", u"Military", "Etc/GMT+3", False)
|
||||||
|
tzabbr_register("PDT", u"Pacific Daylight Time", u"North America",
|
||||||
|
"US/Pacific", True)
|
||||||
|
tzabbr_register("PST", u"Pacific Standard Time", u"North America",
|
||||||
|
"US/Pacific", False)
|
||||||
|
tzabbr_register("Q", u"Quebec Time Zone", u"Military", "Etc/GMT+4", False)
|
||||||
|
tzabbr_register("R", u"Romeo Time Zone", u"Military", "Etc/GMT+5", False)
|
||||||
|
tzabbr_register("S", u"Sierra Time Zone", u"Military", "Etc/GMT+6", False)
|
||||||
|
tzabbr_register("T", u"Tango Time Zone", u"Military", "Etc/GMT+7", False)
|
||||||
|
tzabbr_register("U", u"Uniform Time Zone", u"Military", "Etc/GMT+8", False)
|
||||||
|
tzabbr_register("UTC", u"Coordinated Universal Time", u"Europe",
|
||||||
|
pytz.utc, False)
|
||||||
|
tzabbr_register("V", u"Victor Time Zone", u"Military", "Etc/GMT+9", False)
|
||||||
|
tzabbr_register("W", u"Whiskey Time Zone", u"Military", "Etc/GMT+10", False)
|
||||||
|
tzabbr_register("WDT", u"Western Daylight Time", u"Australia",
|
||||||
|
"Australia/West", True)
|
||||||
|
tzabbr_register("WEDT", u"Western European Daylight Time", u"Europe",
|
||||||
|
"Etc/GMT+1", True)
|
||||||
|
tzabbr_register("WEST", u"Western European Summer Time", u"Europe",
|
||||||
|
"Etc/GMT+1", True)
|
||||||
|
tzabbr_register("WET", u"Western European Time", u"Europe", pytz.utc, False)
|
||||||
|
tzabbr_register("WST", u"Western Standard Time", u"Australia",
|
||||||
|
"Australia/West", False)
|
||||||
|
tzabbr_register("X", u"X-ray Time Zone", u"Military", "Etc/GMT+11", False)
|
||||||
|
tzabbr_register("Y", u"Yankee Time Zone", u"Military", "Etc/GMT+12", False)
|
||||||
|
tzabbr_register("Z", u"Zulu Time Zone", u"Military", pytz.utc, False)
|
|
@ -1,36 +1,25 @@
|
||||||
import os
|
import os
|
||||||
import errno
|
|
||||||
from math import log
|
from math import log
|
||||||
|
|
||||||
|
|
||||||
def human_size(num):
|
def human_size(num):
|
||||||
"""Human friendly file size"""
|
"""Human friendly file size"""
|
||||||
unit_list = list(zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'],
|
unit_list = zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'], [0, 0, 1, 2, 2])
|
||||||
[0, 0, 1, 2, 2]))
|
if num > 1:
|
||||||
if num == 0:
|
|
||||||
return '0 bytes'
|
|
||||||
if num == 1:
|
|
||||||
return '1 byte'
|
|
||||||
exponent = min(int(log(num, 1024)), len(unit_list) - 1)
|
exponent = min(int(log(num, 1024)), len(unit_list) - 1)
|
||||||
quotient = float(num) / 1024**exponent
|
quotient = float(num) / 1024**exponent
|
||||||
unit, num_decimals = unit_list[exponent]
|
unit, num_decimals = unit_list[exponent]
|
||||||
format_string = '{:.%sf} {}' % (num_decimals)
|
format_string = '{:.%sf} {}' % (num_decimals)
|
||||||
return format_string.format(quotient, unit)
|
return format_string.format(quotient, unit)
|
||||||
|
if num == 0: # pragma: no cover
|
||||||
|
return '0 bytes'
|
||||||
|
if num == 1: # pragma: no cover
|
||||||
|
return '1 byte'
|
||||||
|
|
||||||
def du(path):
|
def du(path):
|
||||||
"""Like du -sb, returns total size of path in bytes. Ignore
|
"""Like du -sb, returns total size of path in bytes."""
|
||||||
errors that might occur if we encounter broken symlinks or
|
size = os.path.getsize(path)
|
||||||
files in the process of being removed."""
|
|
||||||
try:
|
|
||||||
st = os.stat(path)
|
|
||||||
size = st.st_blocks * 512
|
|
||||||
if os.path.isdir(path):
|
if os.path.isdir(path):
|
||||||
for thisfile in os.listdir(path):
|
for thisfile in os.listdir(path):
|
||||||
filepath = os.path.join(path, thisfile)
|
filepath = os.path.join(path, thisfile)
|
||||||
size += du(filepath)
|
size += du(filepath)
|
||||||
return size
|
return size
|
||||||
except OSError as e:
|
|
||||||
if e.errno != errno.ENOENT:
|
|
||||||
raise
|
|
||||||
return 0
|
|
||||||
|
|
|
@ -1,20 +0,0 @@
|
||||||
# Implementation of hole punching via fallocate, if the OS
|
|
||||||
# and filesystem support it.
|
|
||||||
|
|
||||||
import fallocate
|
|
||||||
|
|
||||||
|
|
||||||
def punch_hole(filename, offset, length, ignore_errors=True):
|
|
||||||
"""Punch a hole in the file. This isn't well supported, so errors
|
|
||||||
are ignored by default."""
|
|
||||||
try:
|
|
||||||
with open(filename, "r+") as f:
|
|
||||||
fallocate.fallocate(
|
|
||||||
f.fileno(),
|
|
||||||
offset,
|
|
||||||
length,
|
|
||||||
fallocate.FALLOC_FL_KEEP_SIZE | fallocate.FALLOC_FL_PUNCH_HOLE)
|
|
||||||
except Exception:
|
|
||||||
if ignore_errors:
|
|
||||||
return
|
|
||||||
raise
|
|
|
@ -1,168 +0,0 @@
|
||||||
"""Interval. Like nilmdb.server.interval, but re-implemented here
|
|
||||||
in plain Python so clients have easier access to it, and with a few
|
|
||||||
helper functions.
|
|
||||||
|
|
||||||
Intervals are half-open, ie. they include data points with timestamps
|
|
||||||
[start, end)
|
|
||||||
"""
|
|
||||||
|
|
||||||
import nilmdb.utils.time
|
|
||||||
import nilmdb.utils.iterator
|
|
||||||
|
|
||||||
|
|
||||||
class IntervalError(Exception):
|
|
||||||
"""Error due to interval overlap, etc"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Interval
|
|
||||||
class Interval:
|
|
||||||
"""Represents an interval of time."""
|
|
||||||
|
|
||||||
def __init__(self, start, end):
|
|
||||||
"""
|
|
||||||
'start' and 'end' are arbitrary numbers that represent time
|
|
||||||
"""
|
|
||||||
if start >= end:
|
|
||||||
# Explicitly disallow zero-width intervals, since they're half-open
|
|
||||||
raise IntervalError("start %s must precede end %s" % (start, end))
|
|
||||||
self.start = start
|
|
||||||
self.end = end
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
s = repr(self.start) + ", " + repr(self.end)
|
|
||||||
return self.__class__.__name__ + "(" + s + ")"
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return ("[" + nilmdb.utils.time.timestamp_to_string(self.start) +
|
|
||||||
" -> " + nilmdb.utils.time.timestamp_to_string(self.end) + ")")
|
|
||||||
|
|
||||||
def human_string(self):
|
|
||||||
return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
|
|
||||||
" -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")
|
|
||||||
|
|
||||||
# Compare two intervals. If non-equal, order by start then end
|
|
||||||
def __lt__(self, other):
|
|
||||||
return (self.start, self.end) < (other.start, other.end)
|
|
||||||
|
|
||||||
def __gt__(self, other):
|
|
||||||
return (self.start, self.end) > (other.start, other.end)
|
|
||||||
|
|
||||||
def __le__(self, other):
|
|
||||||
return (self.start, self.end) <= (other.start, other.end)
|
|
||||||
|
|
||||||
def __ge__(self, other):
|
|
||||||
return (self.start, self.end) >= (other.start, other.end)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return (self.start, self.end) == (other.start, other.end)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return (self.start, self.end) != (other.start, other.end)
|
|
||||||
|
|
||||||
def intersects(self, other):
|
|
||||||
"""Return True if two Interval objects intersect"""
|
|
||||||
if not isinstance(other, Interval):
|
|
||||||
raise TypeError("need an Interval")
|
|
||||||
if self.end <= other.start or self.start >= other.end:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def subset(self, start, end):
|
|
||||||
"""Return a new Interval that is a subset of this one"""
|
|
||||||
# A subclass that tracks additional data might override this.
|
|
||||||
if start < self.start or end > self.end:
|
|
||||||
raise IntervalError("not a subset")
|
|
||||||
return Interval(start, end)
|
|
||||||
|
|
||||||
|
|
||||||
def _interval_math_helper(a, b, op, subset=True):
|
|
||||||
"""Helper for set_difference, intersection functions,
|
|
||||||
to compute interval subsets based on a math operator on ranges
|
|
||||||
present in A and B. Subsets are computed from A, or new intervals
|
|
||||||
are generated if subset = False."""
|
|
||||||
# Iterate through all starts and ends in sorted order. Add a
|
|
||||||
# tag to the iterator so that we can figure out which one they
|
|
||||||
# were, after sorting.
|
|
||||||
def decorate(it, key_start, key_end):
|
|
||||||
for i in it:
|
|
||||||
yield i.start, key_start, i
|
|
||||||
yield i.end, key_end, i
|
|
||||||
a_iter = decorate(iter(a), 0, 2)
|
|
||||||
b_iter = decorate(iter(b), 1, 3)
|
|
||||||
|
|
||||||
# Now iterate over the timestamps of each start and end.
|
|
||||||
# At each point, evaluate which type of end it is, to determine
|
|
||||||
# how to build up the output intervals.
|
|
||||||
a_interval = None
|
|
||||||
in_a = False
|
|
||||||
in_b = False
|
|
||||||
out_start = None
|
|
||||||
for (ts, k, i) in nilmdb.utils.iterator.imerge(a_iter, b_iter):
|
|
||||||
if k == 0:
|
|
||||||
a_interval = i
|
|
||||||
in_a = True
|
|
||||||
elif k == 1:
|
|
||||||
in_b = True
|
|
||||||
elif k == 2:
|
|
||||||
in_a = False
|
|
||||||
else: # k == 3
|
|
||||||
in_b = False
|
|
||||||
include = op(in_a, in_b)
|
|
||||||
if include and out_start is None:
|
|
||||||
out_start = ts
|
|
||||||
elif not include:
|
|
||||||
if out_start is not None and out_start != ts:
|
|
||||||
if subset:
|
|
||||||
yield a_interval.subset(out_start, ts)
|
|
||||||
else:
|
|
||||||
yield Interval(out_start, ts)
|
|
||||||
out_start = None
|
|
||||||
|
|
||||||
|
|
||||||
def set_difference(a, b):
|
|
||||||
"""
|
|
||||||
Compute the difference (a \\ b) between the intervals in 'a' and
|
|
||||||
the intervals in 'b'; i.e., the ranges that are present in 'self'
|
|
||||||
but not 'other'.
|
|
||||||
|
|
||||||
'a' and 'b' must both be iterables.
|
|
||||||
|
|
||||||
Returns a generator that yields each interval in turn.
|
|
||||||
Output intervals are built as subsets of the intervals in the
|
|
||||||
first argument (a).
|
|
||||||
"""
|
|
||||||
return _interval_math_helper(a, b, (lambda a, b: a and not b))
|
|
||||||
|
|
||||||
|
|
||||||
def intersection(a, b):
|
|
||||||
"""
|
|
||||||
Compute the intersection between the intervals in 'a' and the
|
|
||||||
intervals in 'b'; i.e., the ranges that are present in both 'a'
|
|
||||||
and 'b'.
|
|
||||||
|
|
||||||
'a' and 'b' must both be iterables.
|
|
||||||
|
|
||||||
Returns a generator that yields each interval in turn.
|
|
||||||
Output intervals are built as subsets of the intervals in the
|
|
||||||
first argument (a).
|
|
||||||
"""
|
|
||||||
return _interval_math_helper(a, b, (lambda a, b: a and b))
|
|
||||||
|
|
||||||
|
|
||||||
def optimize(it):
|
|
||||||
"""
|
|
||||||
Given an iterable 'it' with intervals, optimize them by joining
|
|
||||||
together intervals that are adjacent in time, and return a generator
|
|
||||||
that yields the new intervals.
|
|
||||||
"""
|
|
||||||
saved_int = None
|
|
||||||
for interval in it:
|
|
||||||
if saved_int is not None:
|
|
||||||
if saved_int.end == interval.start:
|
|
||||||
interval.start = saved_int.start
|
|
||||||
else:
|
|
||||||
yield saved_int
|
|
||||||
saved_int = interval
|
|
||||||
if saved_int is not None:
|
|
||||||
yield saved_int
|
|
|
@ -1,38 +0,0 @@
|
||||||
# Misc iterator tools
|
|
||||||
|
|
||||||
# Iterator merging, based on http://code.activestate.com/recipes/491285/
|
|
||||||
import heapq
|
|
||||||
|
|
||||||
|
|
||||||
def imerge(*iterables):
|
|
||||||
'''Merge multiple sorted inputs into a single sorted output.
|
|
||||||
|
|
||||||
Equivalent to: sorted(itertools.chain(*iterables))
|
|
||||||
|
|
||||||
>>> list(imerge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
|
|
||||||
[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
|
|
||||||
|
|
||||||
'''
|
|
||||||
heappop, siftup, _Stop = heapq.heappop, heapq._siftup, StopIteration
|
|
||||||
|
|
||||||
h = []
|
|
||||||
h_append = h.append
|
|
||||||
for it in map(iter, iterables):
|
|
||||||
try:
|
|
||||||
nexter = it.__next__
|
|
||||||
h_append([nexter(), nexter])
|
|
||||||
except _Stop:
|
|
||||||
pass
|
|
||||||
heapq.heapify(h)
|
|
||||||
|
|
||||||
while 1:
|
|
||||||
try:
|
|
||||||
while 1:
|
|
||||||
v, nexter = s = h[0] # raises IndexError when h is empty
|
|
||||||
yield v
|
|
||||||
s[0] = nexter() # raises StopIteration when exhausted
|
|
||||||
siftup(h, 0) # restore heap condition
|
|
||||||
except _Stop:
|
|
||||||
heappop(h) # remove empty iterator
|
|
||||||
except IndexError:
|
|
||||||
return
|
|
100
nilmdb/utils/iteratorizer.py
Normal file
100
nilmdb/utils/iteratorizer.py
Normal file
|
@ -0,0 +1,100 @@
|
||||||
|
import Queue
|
||||||
|
import threading
|
||||||
|
import sys
|
||||||
|
import contextlib
|
||||||
|
|
||||||
|
# This file provides a context manager that converts a function
|
||||||
|
# that takes a callback into a generator that returns an iterable.
|
||||||
|
# This is done by running the function in a new thread.
|
||||||
|
|
||||||
|
# Based partially on http://stackoverflow.com/questions/9968592/
|
||||||
|
|
||||||
|
class IteratorizerThread(threading.Thread):
|
||||||
|
def __init__(self, queue, function, curl_hack):
|
||||||
|
"""
|
||||||
|
function: function to execute, which takes the
|
||||||
|
callback (provided by this class) as an argument
|
||||||
|
"""
|
||||||
|
threading.Thread.__init__(self)
|
||||||
|
self.name = "Iteratorizer-" + function.__name__ + "-" + self.name
|
||||||
|
self.function = function
|
||||||
|
self.queue = queue
|
||||||
|
self.die = False
|
||||||
|
self.curl_hack = curl_hack
|
||||||
|
|
||||||
|
def callback(self, data):
|
||||||
|
try:
|
||||||
|
if self.die:
|
||||||
|
raise Exception() # trigger termination
|
||||||
|
self.queue.put((1, data))
|
||||||
|
except:
|
||||||
|
if self.curl_hack:
|
||||||
|
# We can't raise exceptions, because the pycurl
|
||||||
|
# extension module will unconditionally print the
|
||||||
|
# exception itself, and not pass it up to the caller.
|
||||||
|
# Instead, just return a value that tells curl to
|
||||||
|
# abort. (-1 would be best, in case we were given 0
|
||||||
|
# bytes, but the extension doesn't support that).
|
||||||
|
self.queue.put((2, sys.exc_info()))
|
||||||
|
return 0
|
||||||
|
raise
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
try:
|
||||||
|
result = self.function(self.callback)
|
||||||
|
except:
|
||||||
|
self.queue.put((2, sys.exc_info()))
|
||||||
|
else:
|
||||||
|
self.queue.put((0, result))
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def Iteratorizer(function, curl_hack = False):
|
||||||
|
"""
|
||||||
|
Context manager that takes a function expecting a callback,
|
||||||
|
and provides an iterable that yields the values passed to that
|
||||||
|
callback instead.
|
||||||
|
|
||||||
|
function: function to execute, which takes a callback
|
||||||
|
(provided by this context manager) as an argument
|
||||||
|
|
||||||
|
with iteratorizer(func) as it:
|
||||||
|
for i in it:
|
||||||
|
print 'callback was passed:', i
|
||||||
|
print 'function returned:', it.retval
|
||||||
|
"""
|
||||||
|
queue = Queue.Queue(maxsize = 1)
|
||||||
|
thread = IteratorizerThread(queue, function, curl_hack)
|
||||||
|
thread.daemon = True
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
class iteratorizer_gen(object):
|
||||||
|
def __init__(self, queue):
|
||||||
|
self.queue = queue
|
||||||
|
self.retval = None
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return self
|
||||||
|
|
||||||
|
def next(self):
|
||||||
|
(typ, data) = self.queue.get()
|
||||||
|
if typ == 0:
|
||||||
|
# function has returned
|
||||||
|
self.retval = data
|
||||||
|
raise StopIteration
|
||||||
|
elif typ == 1:
|
||||||
|
# data is available
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
# callback raised an exception
|
||||||
|
raise data[0], data[1], data[2]
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield iteratorizer_gen(queue)
|
||||||
|
finally:
|
||||||
|
# Ask the thread to die, if it's still running.
|
||||||
|
thread.die = True
|
||||||
|
while thread.isAlive():
|
||||||
|
try:
|
||||||
|
queue.get(True, 0.01)
|
||||||
|
except: # pragma: no cover
|
||||||
|
pass
|
|
@ -1,22 +0,0 @@
|
||||||
# File locking
|
|
||||||
|
|
||||||
import fcntl
|
|
||||||
import errno
|
|
||||||
|
|
||||||
|
|
||||||
def exclusive_lock(f):
|
|
||||||
"""Acquire an exclusive lock. Returns True on successful
|
|
||||||
lock, or False on error."""
|
|
||||||
try:
|
|
||||||
fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
|
|
||||||
except IOError as e:
|
|
||||||
if e.errno in (errno.EACCES, errno.EAGAIN):
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def exclusive_unlock(f):
|
|
||||||
"""Release an exclusive lock."""
|
|
||||||
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
|
|
|
@ -6,11 +6,10 @@
|
||||||
import collections
|
import collections
|
||||||
import decorator
|
import decorator
|
||||||
|
|
||||||
|
|
||||||
def lru_cache(size = 10, onremove = None, keys = slice(None)):
|
def lru_cache(size = 10, onremove = None, keys = slice(None)):
|
||||||
"""Least-recently-used cache decorator.
|
"""Least-recently-used cache decorator.
|
||||||
|
|
||||||
@lru_cache(size=10, onremove=None)
|
@lru_cache(size = 10, onevict = None)
|
||||||
def f(...):
|
def f(...):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -54,17 +53,14 @@ def lru_cache(size=10, onremove=None, keys=slice(None)):
|
||||||
if key in cache:
|
if key in cache:
|
||||||
evict(cache.pop(key))
|
evict(cache.pop(key))
|
||||||
else:
|
else:
|
||||||
if cache:
|
if len(cache) > 0 and len(args) != len(cache.iterkeys().next()):
|
||||||
if len(args) != len(next(iter(cache.keys()))):
|
|
||||||
raise KeyError("trying to remove from LRU cache, but "
|
raise KeyError("trying to remove from LRU cache, but "
|
||||||
"number of arguments doesn't match the "
|
"number of arguments doesn't match the "
|
||||||
"cache key length")
|
"cache key length")
|
||||||
|
|
||||||
def cache_remove_all():
|
def cache_remove_all():
|
||||||
nonlocal cache
|
|
||||||
for key in cache:
|
for key in cache:
|
||||||
evict(cache[key])
|
evict(cache.pop(key))
|
||||||
cache = collections.OrderedDict()
|
|
||||||
|
|
||||||
def cache_info():
|
def cache_info():
|
||||||
return (func.cache_hits, func.cache_misses)
|
return (func.cache_hits, func.cache_misses)
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
|
from nilmdb.utils.printf import *
|
||||||
import sys
|
import sys
|
||||||
import inspect
|
import inspect
|
||||||
import decorator
|
import decorator
|
||||||
from nilmdb.utils.printf import fprintf
|
|
||||||
|
|
||||||
|
|
||||||
def must_close(errorfile = sys.stderr, wrap_verify = False):
|
def must_close(errorfile = sys.stderr, wrap_verify = False):
|
||||||
"""Class decorator that warns on 'errorfile' at deletion time if
|
"""Class decorator that warns on 'errorfile' at deletion time if
|
||||||
|
@ -13,17 +12,12 @@ def must_close(errorfile=sys.stderr, wrap_verify=False):
|
||||||
already been called."""
|
already been called."""
|
||||||
def class_decorator(cls):
|
def class_decorator(cls):
|
||||||
|
|
||||||
def is_method_or_function(x):
|
|
||||||
return inspect.ismethod(x) or inspect.isfunction(x)
|
|
||||||
|
|
||||||
def wrap_class_method(wrapper):
|
def wrap_class_method(wrapper):
|
||||||
try:
|
try:
|
||||||
orig = getattr(cls, wrapper.__name__)
|
orig = getattr(cls, wrapper.__name__).im_func
|
||||||
except AttributeError:
|
except:
|
||||||
orig = lambda x: None
|
orig = lambda x: None
|
||||||
if is_method_or_function(orig):
|
setattr(cls, wrapper.__name__, decorator.decorator(wrapper, orig))
|
||||||
setattr(cls, wrapper.__name__,
|
|
||||||
decorator.decorator(wrapper, orig))
|
|
||||||
|
|
||||||
@wrap_class_method
|
@wrap_class_method
|
||||||
def __init__(orig, self, *args, **kwargs):
|
def __init__(orig, self, *args, **kwargs):
|
||||||
|
@ -34,13 +28,10 @@ def must_close(errorfile=sys.stderr, wrap_verify=False):
|
||||||
|
|
||||||
@wrap_class_method
|
@wrap_class_method
|
||||||
def __del__(orig, self, *args, **kwargs):
|
def __del__(orig, self, *args, **kwargs):
|
||||||
try:
|
|
||||||
if "_must_close" in self.__dict__:
|
if "_must_close" in self.__dict__:
|
||||||
fprintf(errorfile, "error: %s.close() wasn't called!\n",
|
fprintf(errorfile, "error: %s.close() wasn't called!\n",
|
||||||
self.__class__.__name__)
|
self.__class__.__name__)
|
||||||
return orig(self, *args, **kwargs)
|
return orig(self, *args, **kwargs)
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
@wrap_class_method
|
@wrap_class_method
|
||||||
def close(orig, self, *args, **kwargs):
|
def close(orig, self, *args, **kwargs):
|
||||||
|
@ -55,17 +46,16 @@ def must_close(errorfile=sys.stderr, wrap_verify=False):
|
||||||
raise AssertionError("called " + str(orig) + " after close")
|
raise AssertionError("called " + str(orig) + " after close")
|
||||||
return orig(self, *args, **kwargs)
|
return orig(self, *args, **kwargs)
|
||||||
if wrap_verify:
|
if wrap_verify:
|
||||||
for (name, method) in inspect.getmembers(cls,
|
for (name, method) in inspect.getmembers(cls, inspect.ismethod):
|
||||||
is_method_or_function):
|
# Skip class methods
|
||||||
|
if method.__self__ is not None:
|
||||||
|
continue
|
||||||
# Skip some methods
|
# Skip some methods
|
||||||
if name in [ "__del__", "__init__" ]:
|
if name in [ "__del__", "__init__" ]:
|
||||||
continue
|
continue
|
||||||
# Set up wrapper
|
# Set up wrapper
|
||||||
if inspect.ismethod(method):
|
setattr(cls, name, decorator.decorator(verifier,
|
||||||
func = method.__func__
|
method.im_func))
|
||||||
else:
|
|
||||||
func = method
|
|
||||||
setattr(cls, name, decorator.decorator(verifier, func))
|
|
||||||
|
|
||||||
return cls
|
return cls
|
||||||
return class_decorator
|
return class_decorator
|
||||||
|
|
|
@ -1,13 +1,9 @@
|
||||||
"""printf, fprintf, sprintf"""
|
"""printf, fprintf, sprintf"""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
def printf(_str, *args):
|
def printf(_str, *args):
|
||||||
print(_str % args, end='')
|
print(_str % args, end='')
|
||||||
|
|
||||||
|
|
||||||
def fprintf(_file, _str, *args):
|
def fprintf(_file, _str, *args):
|
||||||
print(_str % args, end='', file=_file)
|
print(_str % args, end='', file=_file)
|
||||||
|
|
||||||
|
|
||||||
def sprintf(_str, *args):
|
def sprintf(_str, *args):
|
||||||
return (_str % args)
|
return (_str % args)
|
||||||
|
|
|
@ -1,6 +1,10 @@
|
||||||
import queue
|
import Queue
|
||||||
import threading
|
import threading
|
||||||
import sys
|
import sys
|
||||||
|
import decorator
|
||||||
|
import inspect
|
||||||
|
import types
|
||||||
|
import functools
|
||||||
|
|
||||||
# This file provides a class that will wrap an object and serialize
|
# This file provides a class that will wrap an object and serialize
|
||||||
# all calls to its methods. All calls to that object will be queued
|
# all calls to its methods. All calls to that object will be queued
|
||||||
|
@ -9,7 +13,6 @@ import sys
|
||||||
|
|
||||||
# Based partially on http://stackoverflow.com/questions/2642515/
|
# Based partially on http://stackoverflow.com/questions/2642515/
|
||||||
|
|
||||||
|
|
||||||
class SerializerThread(threading.Thread):
|
class SerializerThread(threading.Thread):
|
||||||
"""Thread that retrieves call information from the queue, makes the
|
"""Thread that retrieves call information from the queue, makes the
|
||||||
call, and returns the results."""
|
call, and returns the results."""
|
||||||
|
@ -37,7 +40,6 @@ class SerializerThread(threading.Thread):
|
||||||
result_queue.put((exception, result))
|
result_queue.put((exception, result))
|
||||||
del exception, result
|
del exception, result
|
||||||
|
|
||||||
|
|
||||||
def serializer_proxy(obj_or_type):
|
def serializer_proxy(obj_or_type):
|
||||||
"""Wrap the given object or type in a SerializerObjectProxy.
|
"""Wrap the given object or type in a SerializerObjectProxy.
|
||||||
|
|
||||||
|
@ -47,88 +49,61 @@ def serializer_proxy(obj_or_type):
|
||||||
The proxied requests, including instantiation, are performed in a
|
The proxied requests, including instantiation, are performed in a
|
||||||
single thread and serialized between caller threads.
|
single thread and serialized between caller threads.
|
||||||
"""
|
"""
|
||||||
class SerializerCallProxy():
|
class SerializerCallProxy(object):
|
||||||
def __init__(self, call_queue, func, objectproxy):
|
def __init__(self, call_queue, func, objectproxy):
|
||||||
self.call_queue = call_queue
|
self.call_queue = call_queue
|
||||||
self.func = func
|
self.func = func
|
||||||
# Need to hold a reference to object proxy so it doesn't
|
# Need to hold a reference to object proxy so it doesn't
|
||||||
# go away (and kill the thread) until after get called.
|
# go away (and kill the thread) until after get called.
|
||||||
self.objectproxy = objectproxy
|
self.objectproxy = objectproxy
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
def __call__(self, *args, **kwargs):
|
||||||
result_queue = queue.Queue()
|
result_queue = Queue.Queue()
|
||||||
self.call_queue.put((result_queue, self.func, args, kwargs))
|
self.call_queue.put((result_queue, self.func, args, kwargs))
|
||||||
( exc_info, result ) = result_queue.get()
|
( exc_info, result ) = result_queue.get()
|
||||||
if exc_info is None:
|
if exc_info is None:
|
||||||
return result
|
return result
|
||||||
else:
|
else:
|
||||||
raise exc_info[1].with_traceback(exc_info[2])
|
raise exc_info[0], exc_info[1], exc_info[2]
|
||||||
|
|
||||||
class SerializerObjectProxy():
|
class SerializerObjectProxy(object):
|
||||||
def __init__(self, obj_or_type, *args, **kwargs):
|
def __init__(self, obj_or_type, *args, **kwargs):
|
||||||
self.__object = obj_or_type
|
self.__object = obj_or_type
|
||||||
if isinstance(obj_or_type, type):
|
try:
|
||||||
|
if type(obj_or_type) in (types.TypeType, types.ClassType):
|
||||||
classname = obj_or_type.__name__
|
classname = obj_or_type.__name__
|
||||||
else:
|
else:
|
||||||
classname = obj_or_type.__class__.__name__
|
classname = obj_or_type.__class__.__name__
|
||||||
self.__call_queue = queue.Queue()
|
except AttributeError: # pragma: no cover
|
||||||
|
classname = "???"
|
||||||
|
self.__call_queue = Queue.Queue()
|
||||||
self.__thread = SerializerThread(classname, self.__call_queue)
|
self.__thread = SerializerThread(classname, self.__call_queue)
|
||||||
self.__thread.daemon = True
|
self.__thread.daemon = True
|
||||||
self.__thread.start()
|
self.__thread.start()
|
||||||
self._thread_safe = True
|
self._thread_safe = True
|
||||||
|
|
||||||
def __getattr__(self, key):
|
def __getattr__(self, key):
|
||||||
# If the attribute is a function, we want to return a
|
if key.startswith("_SerializerObjectProxy__"): # pragma: no cover
|
||||||
# proxy that will perform the call through the serializer
|
raise AttributeError
|
||||||
# when called. Otherwise, we want to return the value
|
|
||||||
# directly. This means we need to grab the attribute once,
|
|
||||||
# and therefore self.__object.__getattr__ may be called
|
|
||||||
# in an unsafe way, from the caller's thread.
|
|
||||||
attr = getattr(self.__object, key)
|
attr = getattr(self.__object, key)
|
||||||
if not callable(attr):
|
if not callable(attr):
|
||||||
# It's not callable, so perform the getattr from within
|
|
||||||
# the serializer thread, then return its value.
|
|
||||||
# That may differ from the "attr" value we just grabbed
|
|
||||||
# from here, due to forced ordering in the serializer.
|
|
||||||
getter = SerializerCallProxy(self.__call_queue, getattr, self)
|
getter = SerializerCallProxy(self.__call_queue, getattr, self)
|
||||||
return getter(self.__object, key)
|
return getter(self.__object, key)
|
||||||
else:
|
|
||||||
# It is callable, so return an object that will proxy through
|
|
||||||
# the serializer when called.
|
|
||||||
r = SerializerCallProxy(self.__call_queue, attr, self)
|
r = SerializerCallProxy(self.__call_queue, attr, self)
|
||||||
return r
|
return r
|
||||||
|
|
||||||
# For an interable object, on __iter__(), save the object's
|
|
||||||
# iterator and return this proxy. On next(), call the object's
|
|
||||||
# iterator through this proxy.
|
|
||||||
def __iter__(self):
|
|
||||||
attr = getattr(self.__object, "__iter__")
|
|
||||||
self.__iter = SerializerCallProxy(self.__call_queue, attr, self)()
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __next__(self):
|
|
||||||
return SerializerCallProxy(self.__call_queue,
|
|
||||||
self.__iter.__next__, self)()
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
|
||||||
return self.__getattr__("__getitem__")(key)
|
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
def __call__(self, *args, **kwargs):
|
||||||
"""Call this to instantiate the type, if a type was passed
|
"""Call this to instantiate the type, if a type was passed
|
||||||
to serializer_proxy. Otherwise, pass the call through."""
|
to serializer_proxy. Otherwise, pass the call through."""
|
||||||
ret = SerializerCallProxy(self.__call_queue,
|
ret = SerializerCallProxy(self.__call_queue,
|
||||||
self.__object, self)(*args, **kwargs)
|
self.__object, self)(*args, **kwargs)
|
||||||
if isinstance(self.__object, type):
|
if type(self.__object) in (types.TypeType, types.ClassType):
|
||||||
# Instantiation
|
# Instantiation
|
||||||
self.__object = ret
|
self.__object = ret
|
||||||
return self
|
return self
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
def __del__(self):
|
def __del__(self):
|
||||||
try:
|
|
||||||
# Signal thread to exit, but don't wait for it.
|
|
||||||
self.__call_queue.put((None, None, None, None))
|
self.__call_queue.put((None, None, None, None))
|
||||||
except:
|
self.__thread.join()
|
||||||
pass
|
|
||||||
|
|
||||||
return SerializerObjectProxy(obj_or_type)
|
return SerializerObjectProxy(obj_or_type)
|
||||||
|
|
|
@ -1,19 +0,0 @@
|
||||||
import re
|
|
||||||
|
|
||||||
|
|
||||||
def sort_human(items, key=None):
|
|
||||||
"""Human-friendly sort (/stream/2 before /stream/10)"""
|
|
||||||
def to_num(val):
|
|
||||||
try:
|
|
||||||
return int(val)
|
|
||||||
except Exception:
|
|
||||||
return val
|
|
||||||
|
|
||||||
def human_key(text):
|
|
||||||
if key:
|
|
||||||
text = key(text)
|
|
||||||
# Break into character and numeric chunks.
|
|
||||||
chunks = re.split(r'([0-9]+)', text)
|
|
||||||
return [to_num(c) for c in chunks]
|
|
||||||
|
|
||||||
return sorted(items, key=human_key)
|
|
|
@ -1,25 +1,26 @@
|
||||||
|
from nilmdb.utils.printf import *
|
||||||
import threading
|
import threading
|
||||||
from nilmdb.utils.printf import sprintf
|
import warnings
|
||||||
|
import types
|
||||||
|
|
||||||
|
def verify_proxy(obj_or_type, exception = False, check_thread = True,
|
||||||
def verify_proxy(obj_or_type, check_thread=True,
|
|
||||||
check_concurrent = True):
|
check_concurrent = True):
|
||||||
"""Wrap the given object or type in a VerifyObjectProxy.
|
"""Wrap the given object or type in a VerifyObjectProxy.
|
||||||
|
|
||||||
Returns a VerifyObjectProxy that proxies all method calls to the
|
Returns a VerifyObjectProxy that proxies all method calls to the
|
||||||
given object, as well as attribute retrievals.
|
given object, as well as attribute retrievals.
|
||||||
|
|
||||||
When calling methods, the following checks are performed. On
|
When calling methods, the following checks are performed. If
|
||||||
failure, an exception is raised.
|
exception is True, an exception is raised. Otherwise, a warning
|
||||||
|
is printed.
|
||||||
|
|
||||||
check_thread = True # Fail if two different threads call methods.
|
check_thread = True # Warn/fail if two different threads call methods.
|
||||||
check_concurrent = True # Fail if two functions are concurrently
|
check_concurrent = True # Warn/fail if two functions are concurrently
|
||||||
# run through this proxy
|
# run through this proxy
|
||||||
"""
|
"""
|
||||||
class Namespace():
|
class Namespace(object):
|
||||||
pass
|
pass
|
||||||
|
class VerifyCallProxy(object):
|
||||||
class VerifyCallProxy():
|
|
||||||
def __init__(self, func, parent_namespace):
|
def __init__(self, func, parent_namespace):
|
||||||
self.func = func
|
self.func = func
|
||||||
self.parent_namespace = parent_namespace
|
self.parent_namespace = parent_namespace
|
||||||
|
@ -41,16 +42,22 @@ def verify_proxy(obj_or_type, check_thread=True,
|
||||||
" but %s called %s.%s",
|
" but %s called %s.%s",
|
||||||
p.thread.name, p.classname, p.thread_callee,
|
p.thread.name, p.classname, p.thread_callee,
|
||||||
this.name, p.classname, callee)
|
this.name, p.classname, callee)
|
||||||
|
if exception:
|
||||||
raise AssertionError(err)
|
raise AssertionError(err)
|
||||||
|
else: # pragma: no cover
|
||||||
|
warnings.warn(err)
|
||||||
|
|
||||||
need_concur_unlock = False
|
need_concur_unlock = False
|
||||||
if check_concurrent:
|
if check_concurrent:
|
||||||
if not p.concur_lock.acquire(False):
|
if p.concur_lock.acquire(False) == False:
|
||||||
err = sprintf("unsafe concurrency: %s called %s.%s "
|
err = sprintf("unsafe concurrency: %s called %s.%s "
|
||||||
"while %s is still in %s.%s",
|
"while %s is still in %s.%s",
|
||||||
this.name, p.classname, callee,
|
this.name, p.classname, callee,
|
||||||
p.concur_tname, p.classname, p.concur_callee)
|
p.concur_tname, p.classname, p.concur_callee)
|
||||||
|
if exception:
|
||||||
raise AssertionError(err)
|
raise AssertionError(err)
|
||||||
|
else: # pragma: no cover
|
||||||
|
warnings.warn(err)
|
||||||
else:
|
else:
|
||||||
p.concur_tname = this.name
|
p.concur_tname = this.name
|
||||||
p.concur_callee = callee
|
p.concur_callee = callee
|
||||||
|
@ -63,7 +70,7 @@ def verify_proxy(obj_or_type, check_thread=True,
|
||||||
p.concur_lock.release()
|
p.concur_lock.release()
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
class VerifyObjectProxy():
|
class VerifyObjectProxy(object):
|
||||||
def __init__(self, obj_or_type, *args, **kwargs):
|
def __init__(self, obj_or_type, *args, **kwargs):
|
||||||
p = Namespace()
|
p = Namespace()
|
||||||
self.__ns = p
|
self.__ns = p
|
||||||
|
@ -73,12 +80,17 @@ def verify_proxy(obj_or_type, check_thread=True,
|
||||||
p.concur_tname = None
|
p.concur_tname = None
|
||||||
p.concur_callee = None
|
p.concur_callee = None
|
||||||
self.__obj = obj_or_type
|
self.__obj = obj_or_type
|
||||||
if isinstance(obj_or_type, type):
|
try:
|
||||||
|
if type(obj_or_type) in (types.TypeType, types.ClassType):
|
||||||
p.classname = self.__obj.__name__
|
p.classname = self.__obj.__name__
|
||||||
else:
|
else:
|
||||||
p.classname = self.__obj.__class__.__name__
|
p.classname = self.__obj.__class__.__name__
|
||||||
|
except AttributeError: # pragma: no cover
|
||||||
|
p.classname = "???"
|
||||||
|
|
||||||
def __getattr__(self, key):
|
def __getattr__(self, key):
|
||||||
|
if key.startswith("_VerifyObjectProxy__"): # pragma: no cover
|
||||||
|
raise AttributeError
|
||||||
attr = getattr(self.__obj, key)
|
attr = getattr(self.__obj, key)
|
||||||
if not callable(attr):
|
if not callable(attr):
|
||||||
return VerifyCallProxy(getattr, self.__ns)(self.__obj, key)
|
return VerifyCallProxy(getattr, self.__ns)(self.__obj, key)
|
||||||
|
@ -88,7 +100,7 @@ def verify_proxy(obj_or_type, check_thread=True,
|
||||||
"""Call this to instantiate the type, if a type was passed
|
"""Call this to instantiate the type, if a type was passed
|
||||||
to verify_proxy. Otherwise, pass the call through."""
|
to verify_proxy. Otherwise, pass the call through."""
|
||||||
ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs)
|
ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs)
|
||||||
if isinstance(self.__obj, type):
|
if type(self.__obj) in (types.TypeType, types.ClassType):
|
||||||
# Instantiation
|
# Instantiation
|
||||||
self.__obj = ret
|
self.__obj = ret
|
||||||
return self
|
return self
|
||||||
|
|
|
@ -1,95 +1,12 @@
|
||||||
|
from nilmdb.utils import datetime_tz
|
||||||
import re
|
import re
|
||||||
import time
|
|
||||||
import datetime_tz
|
|
||||||
|
|
||||||
# Range
|
|
||||||
min_timestamp = (-2**63)
|
|
||||||
max_timestamp = (2**63 - 1)
|
|
||||||
|
|
||||||
# Smallest representable step
|
|
||||||
epsilon = 1
|
|
||||||
|
|
||||||
|
|
||||||
def string_to_timestamp(string):
|
|
||||||
"""Convert a string that represents an integer number of microseconds
|
|
||||||
since epoch."""
|
|
||||||
try:
|
|
||||||
# Parse a string like "1234567890123456" and return an integer
|
|
||||||
return int(string)
|
|
||||||
except ValueError:
|
|
||||||
# Try parsing as a float, in case it's "1234567890123456.0"
|
|
||||||
return int(round(float(string)))
|
|
||||||
|
|
||||||
|
|
||||||
def timestamp_to_string(timestamp):
|
|
||||||
"""Convert a timestamp (integer microseconds since epoch) to a string"""
|
|
||||||
if isinstance(timestamp, float):
|
|
||||||
return str(int(round(timestamp)))
|
|
||||||
else:
|
|
||||||
return str(timestamp)
|
|
||||||
|
|
||||||
|
|
||||||
def timestamp_to_bytes(timestamp):
|
|
||||||
"""Convert a timestamp (integer microseconds since epoch) to a Python
|
|
||||||
bytes object"""
|
|
||||||
return timestamp_to_string(timestamp).encode('utf-8')
|
|
||||||
|
|
||||||
|
|
||||||
def timestamp_to_human(timestamp):
|
|
||||||
"""Convert a timestamp (integer microseconds since epoch) to a
|
|
||||||
human-readable string, using the local timezone for display
|
|
||||||
(e.g. from the TZ env var)."""
|
|
||||||
if timestamp == min_timestamp:
|
|
||||||
return "(minimum)"
|
|
||||||
if timestamp == max_timestamp:
|
|
||||||
return "(maximum)"
|
|
||||||
dt = datetime_tz.datetime_tz.fromtimestamp(timestamp_to_unix(timestamp))
|
|
||||||
return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
|
|
||||||
|
|
||||||
|
|
||||||
def unix_to_timestamp(unix):
|
|
||||||
"""Convert a Unix timestamp (floating point seconds since epoch)
|
|
||||||
into a NILM timestamp (integer microseconds since epoch)"""
|
|
||||||
return int(round(unix * 1e6))
|
|
||||||
|
|
||||||
|
|
||||||
def timestamp_to_unix(timestamp):
|
|
||||||
"""Convert a NILM timestamp (integer microseconds since epoch)
|
|
||||||
into a Unix timestamp (floating point seconds since epoch)"""
|
|
||||||
return timestamp / 1e6
|
|
||||||
|
|
||||||
|
|
||||||
seconds_to_timestamp = unix_to_timestamp
|
|
||||||
timestamp_to_seconds = timestamp_to_unix
|
|
||||||
|
|
||||||
|
|
||||||
def rate_to_period(hz, cycles=1):
|
|
||||||
"""Convert a rate (in Hz) to a period (in timestamp units).
|
|
||||||
Returns an integer."""
|
|
||||||
period = unix_to_timestamp(cycles) / float(hz)
|
|
||||||
return int(round(period))
|
|
||||||
|
|
||||||
|
|
||||||
def parse_time(toparse):
|
def parse_time(toparse):
|
||||||
"""
|
"""
|
||||||
Parse a free-form time string and return a nilmdb timestamp
|
Parse a free-form time string and return a datetime_tz object.
|
||||||
(integer microseconds since epoch). If the string doesn't contain a
|
If the string doesn't contain a timestamp, the current local
|
||||||
timestamp, the current local timezone is assumed (e.g. from the TZ
|
timezone is assumed (e.g. from the TZ env var).
|
||||||
env var).
|
|
||||||
"""
|
"""
|
||||||
if toparse == "min":
|
|
||||||
return min_timestamp
|
|
||||||
if toparse == "max":
|
|
||||||
return max_timestamp
|
|
||||||
|
|
||||||
# If it starts with @, treat it as a NILM timestamp
|
|
||||||
# (integer microseconds since epoch)
|
|
||||||
try:
|
|
||||||
if toparse[0] == '@':
|
|
||||||
return int(toparse[1:])
|
|
||||||
except (ValueError, KeyError, IndexError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# If string isn't "now" and doesn't contain at least 4 digits,
|
# If string isn't "now" and doesn't contain at least 4 digits,
|
||||||
# consider it invalid. smartparse might otherwise accept
|
# consider it invalid. smartparse might otherwise accept
|
||||||
# empty strings and strings with just separators.
|
# empty strings and strings with just separators.
|
||||||
|
@ -98,20 +15,7 @@ def parse_time(toparse):
|
||||||
|
|
||||||
# Try to just parse the time as given
|
# Try to just parse the time as given
|
||||||
try:
|
try:
|
||||||
return unix_to_timestamp(datetime_tz.datetime_tz.
|
return datetime_tz.datetime_tz.smartparse(toparse)
|
||||||
smartparse(toparse).totimestamp())
|
|
||||||
except (ValueError, OverflowError, TypeError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# If it's parseable as a float, treat it as a Unix or NILM
|
|
||||||
# timestamp based on its range.
|
|
||||||
try:
|
|
||||||
val = float(toparse)
|
|
||||||
# range is from about year 2001 - 2128
|
|
||||||
if 1e9 < val < 5e9:
|
|
||||||
return unix_to_timestamp(val)
|
|
||||||
if 1e15 < val < 5e15:
|
|
||||||
return val
|
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -133,8 +37,7 @@ def parse_time(toparse):
|
||||||
r")", toparse)
|
r")", toparse)
|
||||||
if res is not None:
|
if res is not None:
|
||||||
try:
|
try:
|
||||||
return unix_to_timestamp(datetime_tz.datetime_tz.
|
return datetime_tz.datetime_tz.smartparse(res.group(2))
|
||||||
smartparse(res.group(2)).totimestamp())
|
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -142,7 +45,10 @@ def parse_time(toparse):
|
||||||
# just give up for now.
|
# just give up for now.
|
||||||
raise ValueError("unable to parse timestamp")
|
raise ValueError("unable to parse timestamp")
|
||||||
|
|
||||||
|
def format_time(timestamp):
|
||||||
def now():
|
"""
|
||||||
"""Return current timestamp"""
|
Convert a Unix timestamp to a string for printing, using the
|
||||||
return unix_to_timestamp(time.time())
|
local timezone for display (e.g. from the TZ env var).
|
||||||
|
"""
|
||||||
|
dt = datetime_tz.datetime_tz.fromtimestamp(timestamp)
|
||||||
|
return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
|
||||||
|
|
|
@ -5,17 +5,18 @@
|
||||||
# with nilmdb.utils.Timer("flush"):
|
# with nilmdb.utils.Timer("flush"):
|
||||||
# foo.flush()
|
# foo.flush()
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import absolute_import
|
||||||
import contextlib
|
import contextlib
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def Timer(name = None, tosyslog = False):
|
def Timer(name = None, tosyslog = False):
|
||||||
start = time.time()
|
start = time.time()
|
||||||
yield
|
yield
|
||||||
elapsed = int((time.time() - start) * 1000)
|
elapsed = int((time.time() - start) * 1000)
|
||||||
msg = (name or 'elapsed') + ": " + str(elapsed) + " ms"
|
msg = (name or 'elapsed') + ": " + str(elapsed) + " ms"
|
||||||
if tosyslog:
|
if tosyslog: # pragma: no cover
|
||||||
import syslog
|
import syslog
|
||||||
syslog.syslog(msg)
|
syslog.syslog(msg)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -1,17 +1,16 @@
|
||||||
"""File-like objects that add timestamps to the input lines"""
|
"""File-like objects that add timestamps to the input lines"""
|
||||||
|
|
||||||
from nilmdb.utils.printf import sprintf
|
from nilmdb.utils.printf import *
|
||||||
import nilmdb.utils.time
|
from nilmdb.utils import datetime_tz
|
||||||
|
|
||||||
|
class Timestamper(object):
|
||||||
class Timestamper():
|
|
||||||
"""A file-like object that adds timestamps to lines of an input file."""
|
"""A file-like object that adds timestamps to lines of an input file."""
|
||||||
def __init__(self, infile, ts_iter):
|
def __init__(self, infile, ts_iter):
|
||||||
"""file: filename, or another file-like object
|
"""file: filename, or another file-like object
|
||||||
ts_iter: iterator that returns a timestamp string for
|
ts_iter: iterator that returns a timestamp string for
|
||||||
each line of the file"""
|
each line of the file"""
|
||||||
if isinstance(infile, str):
|
if isinstance(infile, basestring):
|
||||||
self.file = open(infile, "rb")
|
self.file = open(infile, "r")
|
||||||
else:
|
else:
|
||||||
self.file = infile
|
self.file = infile
|
||||||
self.ts_iter = ts_iter
|
self.ts_iter = ts_iter
|
||||||
|
@ -23,19 +22,17 @@ class Timestamper():
|
||||||
while True:
|
while True:
|
||||||
line = self.file.readline(*args)
|
line = self.file.readline(*args)
|
||||||
if not line:
|
if not line:
|
||||||
return b""
|
return ""
|
||||||
if line[0:1] == b'#':
|
if line[0] == '#':
|
||||||
continue
|
continue
|
||||||
# For some reason, coverage on python 3.8 reports that
|
break
|
||||||
# we never hit this break, even though we definitely do.
|
|
||||||
break # pragma: no cover
|
|
||||||
try:
|
try:
|
||||||
return next(self.ts_iter) + line
|
return self.ts_iter.next() + line
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
return b""
|
return ""
|
||||||
|
|
||||||
def readlines(self, size = None):
|
def readlines(self, size = None):
|
||||||
out = b""
|
out = ""
|
||||||
while True:
|
while True:
|
||||||
line = self.readline()
|
line = self.readline()
|
||||||
out += line
|
out += line
|
||||||
|
@ -46,13 +43,12 @@ class Timestamper():
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __next__(self):
|
def next(self):
|
||||||
result = self.readline()
|
result = self.readline()
|
||||||
if not result:
|
if not result:
|
||||||
raise StopIteration
|
raise StopIteration
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
class TimestamperRate(Timestamper):
|
class TimestamperRate(Timestamper):
|
||||||
"""Timestamper that uses a start time and a fixed rate"""
|
"""Timestamper that uses a start time and a fixed rate"""
|
||||||
def __init__(self, infile, start, rate, end = None):
|
def __init__(self, infile, start, rate, end = None):
|
||||||
|
@ -65,39 +61,44 @@ class TimestamperRate(Timestamper):
|
||||||
|
|
||||||
end: If specified, raise StopIteration before outputting a value
|
end: If specified, raise StopIteration before outputting a value
|
||||||
greater than this."""
|
greater than this."""
|
||||||
timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
|
|
||||||
rate_to_period = nilmdb.utils.time.rate_to_period
|
|
||||||
|
|
||||||
def iterator(start, rate, end):
|
def iterator(start, rate, end):
|
||||||
n = 0
|
n = 0
|
||||||
rate = float(rate)
|
rate = float(rate)
|
||||||
while True:
|
while True:
|
||||||
now = start + rate_to_period(rate, n)
|
now = start + n / rate
|
||||||
if end and now >= end:
|
if end and now >= end:
|
||||||
return
|
raise StopIteration
|
||||||
yield timestamp_to_bytes(now) + b" "
|
yield sprintf("%.6f ", start + n / rate)
|
||||||
n += 1
|
n += 1
|
||||||
|
# Handle case where we're passed a datetime or datetime_tz object
|
||||||
|
if "totimestamp" in dir(start):
|
||||||
|
start = start.totimestamp()
|
||||||
Timestamper.__init__(self, infile, iterator(start, rate, end))
|
Timestamper.__init__(self, infile, iterator(start, rate, end))
|
||||||
self.start = start
|
self.start = start
|
||||||
self.rate = rate
|
self.rate = rate
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
|
start = datetime_tz.datetime_tz.fromtimestamp(self.start)
|
||||||
|
start = start.strftime("%a, %d %b %Y %H:%M:%S %Z")
|
||||||
return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)",
|
return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)",
|
||||||
nilmdb.utils.time.timestamp_to_human(self.start),
|
str(start), self.rate)
|
||||||
self.rate)
|
|
||||||
|
|
||||||
|
|
||||||
class TimestamperNow(Timestamper):
|
class TimestamperNow(Timestamper):
|
||||||
"""Timestamper that uses current time"""
|
"""Timestamper that uses current time"""
|
||||||
def __init__(self, infile):
|
def __init__(self, infile):
|
||||||
timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
|
|
||||||
get_now = nilmdb.utils.time.now
|
|
||||||
|
|
||||||
def iterator():
|
def iterator():
|
||||||
while True:
|
while True:
|
||||||
yield timestamp_to_bytes(get_now()) + b" "
|
now = datetime_tz.datetime_tz.utcnow().totimestamp()
|
||||||
|
yield sprintf("%.6f ", now)
|
||||||
Timestamper.__init__(self, infile, iterator())
|
Timestamper.__init__(self, infile, iterator())
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "TimestamperNow(...)"
|
return "TimestamperNow(...)"
|
||||||
|
|
||||||
|
class TimestamperNull(Timestamper):
|
||||||
|
"""Timestamper that adds nothing to each line"""
|
||||||
|
def __init__(self, infile):
|
||||||
|
def iterator():
|
||||||
|
while True:
|
||||||
|
yield ""
|
||||||
|
Timestamper.__init__(self, infile, iterator())
|
||||||
|
def __str__(self):
|
||||||
|
return "TimestamperNull(...)"
|
||||||
|
|
|
@ -1,41 +0,0 @@
|
||||||
argcomplete==1.12.0
|
|
||||||
CherryPy==18.6.0
|
|
||||||
coverage==5.2.1
|
|
||||||
Cython==0.29.21
|
|
||||||
decorator==4.4.2
|
|
||||||
fallocate==1.6.4
|
|
||||||
flake8==3.8.3
|
|
||||||
nose==1.3.7
|
|
||||||
numpy==1.19.1
|
|
||||||
progressbar==2.5
|
|
||||||
psutil==5.7.2
|
|
||||||
python-datetime-tz==0.5.4
|
|
||||||
python-dateutil==2.8.1
|
|
||||||
requests==2.24.0
|
|
||||||
tz==0.2.2
|
|
||||||
yappi==1.2.5
|
|
||||||
|
|
||||||
## The following requirements were added by pip freeze:
|
|
||||||
beautifulsoup4==4.9.1
|
|
||||||
certifi==2020.6.20
|
|
||||||
chardet==3.0.4
|
|
||||||
cheroot==8.4.2
|
|
||||||
idna==2.10
|
|
||||||
jaraco.classes==3.1.0
|
|
||||||
jaraco.collections==3.0.0
|
|
||||||
jaraco.functools==3.0.1
|
|
||||||
jaraco.text==3.2.0
|
|
||||||
mccabe==0.6.1
|
|
||||||
more-itertools==8.4.0
|
|
||||||
portend==2.6
|
|
||||||
pycodestyle==2.6.0
|
|
||||||
pyflakes==2.2.0
|
|
||||||
pytz==2020.1
|
|
||||||
six==1.15.0
|
|
||||||
soupsieve==2.0.1
|
|
||||||
tempora==4.0.0
|
|
||||||
urllib3==1.25.10
|
|
||||||
waitress==1.4.4
|
|
||||||
WebOb==1.8.6
|
|
||||||
WebTest==2.0.35
|
|
||||||
zc.lockfile==2.0
|
|
22
setup.cfg
22
setup.cfg
|
@ -13,6 +13,8 @@ cover-package=nilmdb
|
||||||
cover-erase=1
|
cover-erase=1
|
||||||
# this works, puts html output in cover/ dir:
|
# this works, puts html output in cover/ dir:
|
||||||
# cover-html=1
|
# cover-html=1
|
||||||
|
# need nose 1.1.3 for this:
|
||||||
|
# cover-branches=1
|
||||||
#debug=nose
|
#debug=nose
|
||||||
#debug-log=nose.log
|
#debug-log=nose.log
|
||||||
stop=1
|
stop=1
|
||||||
|
@ -37,23 +39,3 @@ tests=tests
|
||||||
#with-profile=1
|
#with-profile=1
|
||||||
#profile-sort=time
|
#profile-sort=time
|
||||||
##profile-restrict=10 # doesn't work right, treated as string or something
|
##profile-restrict=10 # doesn't work right, treated as string or something
|
||||||
|
|
||||||
[versioneer]
|
|
||||||
VCS=git
|
|
||||||
style=pep440
|
|
||||||
versionfile_source=nilmdb/_version.py
|
|
||||||
versionfile_build=nilmdb/_version.py
|
|
||||||
tag_prefix=nilmdb-
|
|
||||||
parentdir_prefix=nilmdb-
|
|
||||||
|
|
||||||
[flake8]
|
|
||||||
exclude=_version.py
|
|
||||||
extend-ignore=E731
|
|
||||||
per-file-ignores=__init__.py:F401,E402 \
|
|
||||||
serializer.py:E722 \
|
|
||||||
mustclose.py:E722 \
|
|
||||||
fsck.py:E266
|
|
||||||
|
|
||||||
[pylint]
|
|
||||||
ignore=_version.py
|
|
||||||
disable=C0103,C0111,R0913,R0914
|
|
||||||
|
|
106
setup.py
106
setup.py
|
@ -1,62 +1,134 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/python
|
||||||
|
|
||||||
# To release a new version, tag it:
|
# To release a new version, tag it:
|
||||||
# git tag -a nilmdb-1.1 -m "Version 1.1"
|
# git tag -a nilmdb-1.1 -m "Version 1.1"
|
||||||
# git push --tags
|
# git push --tags
|
||||||
# Then just package it up:
|
# Then just package it up:
|
||||||
# python3 setup.py sdist
|
# python setup.py sdist
|
||||||
|
|
||||||
|
# This is supposed to be using Distribute:
|
||||||
|
#
|
||||||
|
# distutils provides a "setup" method.
|
||||||
|
# setuptools is a set of monkeypatches on top of that.
|
||||||
|
# distribute is a particular version/implementation of setuptools.
|
||||||
|
#
|
||||||
|
# So we don't really know if this is using the old setuptools or the
|
||||||
|
# Distribute-provided version of setuptools.
|
||||||
|
|
||||||
|
import traceback
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
from setuptools import setup
|
|
||||||
|
try:
|
||||||
|
from setuptools import setup, find_packages
|
||||||
from distutils.extension import Extension
|
from distutils.extension import Extension
|
||||||
|
import distutils.version
|
||||||
|
except ImportError:
|
||||||
|
traceback.print_exc()
|
||||||
|
print "Please install the prerequisites listed in README.txt"
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
# Versioneer manages version numbers from git tags.
|
# Versioneer manages version numbers from git tags.
|
||||||
# https://github.com/warner/python-versioneer
|
# https://github.com/warner/python-versioneer
|
||||||
import versioneer
|
import versioneer
|
||||||
|
versioneer.versionfile_source = 'nilmdb/_version.py'
|
||||||
|
versioneer.versionfile_build = 'nilmdb/_version.py'
|
||||||
|
versioneer.tag_prefix = 'nilmdb-'
|
||||||
|
versioneer.parentdir_prefix = 'nilmdb-'
|
||||||
|
|
||||||
# External modules that need to be built
|
# Hack to workaround logging/multiprocessing issue:
|
||||||
ext_modules = [ Extension('nilmdb.server.rocket', ['nilmdb/server/rocket.c' ]) ]
|
# https://groups.google.com/d/msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
|
||||||
|
try: import multiprocessing
|
||||||
|
except: pass
|
||||||
|
|
||||||
# Use Cython.
|
# Use Cython if it's new enough, otherwise use preexisting C files.
|
||||||
cython_modules = [ 'nilmdb.server.interval', 'nilmdb.server.rbtree' ]
|
cython_modules = [ 'nilmdb.server.interval',
|
||||||
|
'nilmdb.server.layout',
|
||||||
|
'nilmdb.server.rbtree' ]
|
||||||
|
try:
|
||||||
import Cython
|
import Cython
|
||||||
from Cython.Build import cythonize
|
from Cython.Build import cythonize
|
||||||
|
if (distutils.version.LooseVersion(Cython.__version__) <
|
||||||
|
distutils.version.LooseVersion("0.16")):
|
||||||
|
print "Cython version", Cython.__version__, "is too old; not using it."
|
||||||
|
raise ImportError()
|
||||||
|
use_cython = True
|
||||||
|
except ImportError:
|
||||||
|
use_cython = False
|
||||||
|
|
||||||
|
ext_modules = []
|
||||||
for modulename in cython_modules:
|
for modulename in cython_modules:
|
||||||
filename = modulename.replace('.','/')
|
filename = modulename.replace('.','/')
|
||||||
|
if use_cython:
|
||||||
ext_modules.extend(cythonize(filename + ".pyx"))
|
ext_modules.extend(cythonize(filename + ".pyx"))
|
||||||
|
else:
|
||||||
|
cfile = filename + ".c"
|
||||||
|
if not os.path.exists(cfile):
|
||||||
|
raise Exception("Missing source file " + cfile + ". "
|
||||||
|
"Try installing cython >= 0.16.")
|
||||||
|
ext_modules.append(Extension(modulename, [ cfile ]))
|
||||||
|
|
||||||
# Get list of requirements to use in `install_requires` below. Note
|
# We need a MANIFEST.in. Generate it here rather than polluting the
|
||||||
# that we don't make a distinction between things that are actually
|
# repository with yet another setup-related file.
|
||||||
# required for end-users vs developers (or use `test_requires` or
|
with open("MANIFEST.in", "w") as m:
|
||||||
# anything else) -- just install everything for simplicity.
|
m.write("""
|
||||||
install_requires = open('requirements.txt').readlines()
|
# Root
|
||||||
|
include README.txt
|
||||||
|
include setup.cfg
|
||||||
|
include setup.py
|
||||||
|
include versioneer.py
|
||||||
|
include Makefile
|
||||||
|
include .coveragerc
|
||||||
|
include .pylintrc
|
||||||
|
|
||||||
|
# Cython files -- include source.
|
||||||
|
recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
|
||||||
|
|
||||||
|
# Tests
|
||||||
|
recursive-include tests *.py
|
||||||
|
recursive-include tests/data *
|
||||||
|
include tests/test.order
|
||||||
|
|
||||||
|
# Docs
|
||||||
|
recursive-include docs Makefile *.md
|
||||||
|
""")
|
||||||
|
|
||||||
# Run setup
|
# Run setup
|
||||||
setup(name='nilmdb',
|
setup(name='nilmdb',
|
||||||
version = versioneer.get_version(),
|
version = versioneer.get_version(),
|
||||||
cmdclass = versioneer.get_cmdclass(),
|
cmdclass = versioneer.get_cmdclass(),
|
||||||
url = 'https://git.jim.sh/nilm/nilmdb.git',
|
url = 'https://git.jim.sh/jim/lees/nilmdb.git',
|
||||||
author = 'Jim Paris',
|
author = 'Jim Paris',
|
||||||
description = "NILM Database",
|
description = "NILM Database",
|
||||||
long_description = "NILM Database",
|
long_description = "NILM Database",
|
||||||
license = "Proprietary",
|
license = "Proprietary",
|
||||||
author_email = 'jim@jtan.com',
|
author_email = 'jim@jtan.com',
|
||||||
setup_requires = [ 'setuptools' ],
|
tests_require = [ 'nose',
|
||||||
install_requires = install_requires,
|
'coverage',
|
||||||
|
],
|
||||||
|
setup_requires = [ 'distribute',
|
||||||
|
],
|
||||||
|
install_requires = [ 'decorator',
|
||||||
|
'cherrypy >= 3.2',
|
||||||
|
'simplejson',
|
||||||
|
'pycurl',
|
||||||
|
'python-dateutil',
|
||||||
|
'pytz',
|
||||||
|
'psutil >= 0.3.0',
|
||||||
|
'requests >= 1.1.0, < 2.0.0',
|
||||||
|
],
|
||||||
packages = [ 'nilmdb',
|
packages = [ 'nilmdb',
|
||||||
'nilmdb.utils',
|
'nilmdb.utils',
|
||||||
|
'nilmdb.utils.datetime_tz',
|
||||||
'nilmdb.server',
|
'nilmdb.server',
|
||||||
'nilmdb.client',
|
'nilmdb.client',
|
||||||
'nilmdb.cmdline',
|
'nilmdb.cmdline',
|
||||||
'nilmdb.scripts',
|
'nilmdb.scripts',
|
||||||
'nilmdb.fsck',
|
|
||||||
],
|
],
|
||||||
entry_points = {
|
entry_points = {
|
||||||
'console_scripts': [
|
'console_scripts': [
|
||||||
'nilmtool = nilmdb.scripts.nilmtool:main',
|
'nilmtool = nilmdb.scripts.nilmtool:main',
|
||||||
'nilmdb-server = nilmdb.scripts.nilmdb_server:main',
|
'nilmdb-server = nilmdb.scripts.nilmdb_server:main',
|
||||||
'nilmdb-fsck = nilmdb.scripts.nilmdb_fsck:main',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
ext_modules = ext_modules,
|
ext_modules = ext_modules,
|
||||||
|
|
|
@ -1,124 +1,124 @@
|
||||||
# path: /newton/prep
|
# path: /newton/prep
|
||||||
# layout: float32_8
|
# layout: PrepData
|
||||||
# start: Fri, 23 Mar 2012 10:00:30.000000 +0000
|
# start: Fri, 23 Mar 2012 10:00:30.000000 +0000
|
||||||
# end: Fri, 23 Mar 2012 10:00:31.000000 +0000
|
# end: Fri, 23 Mar 2012 10:00:31.000000 +0000
|
||||||
1332496830000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
|
1332496830.000000 251774.000000 224241.000000 5688.100098 1915.530029 9329.219727 4183.709961 1212.349976 2641.790039
|
||||||
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
|
1332496830.008333 259567.000000 222698.000000 6207.600098 678.671997 9380.230469 4575.580078 2830.610107 2688.629883
|
||||||
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
|
1332496830.016667 263073.000000 223304.000000 4961.640137 2197.120117 7687.310059 4861.859863 2732.780029 3008.540039
|
||||||
1332496830025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
|
1332496830.025000 257614.000000 223323.000000 5003.660156 3525.139893 7165.310059 4685.620117 1715.380005 3440.479980
|
||||||
1332496830033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
|
1332496830.033333 255780.000000 221915.000000 6357.310059 2145.290039 8426.969727 3775.350098 1475.390015 3797.239990
|
||||||
1332496830041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
|
1332496830.041667 260166.000000 223008.000000 6702.589844 1484.959961 9288.099609 3330.830078 1228.500000 3214.320068
|
||||||
1332496830050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
|
1332496830.050000 261231.000000 226426.000000 4980.060059 2982.379883 8499.629883 4267.669922 994.088989 2292.889893
|
||||||
1332496830058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
|
1332496830.058333 255117.000000 226642.000000 4584.410156 4656.439941 7860.149902 5317.310059 1473.599976 2111.689941
|
||||||
1332496830066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
|
1332496830.066667 253300.000000 223554.000000 6455.089844 3036.649902 8869.750000 4986.310059 2607.360107 2839.590088
|
||||||
1332496830075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
|
1332496830.075000 261061.000000 221263.000000 6951.979980 1500.239990 9386.099609 3791.679932 2677.010010 3980.629883
|
||||||
1332496830083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
|
1332496830.083333 266503.000000 223198.000000 5189.609863 2594.560059 8571.530273 3175.000000 919.840027 3792.010010
|
||||||
1332496830091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
|
1332496830.091667 260692.000000 225184.000000 3782.479980 4642.879883 7662.959961 3917.790039 -251.097000 2907.060059
|
||||||
1332496830100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
|
1332496830.100000 253963.000000 225081.000000 5123.529785 3839.550049 8669.030273 4877.819824 943.723999 2527.449951
|
||||||
1332496830108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
|
1332496830.108333 256555.000000 224169.000000 5930.600098 2298.540039 8906.709961 5331.680176 2549.909912 3053.560059
|
||||||
1332496830116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
|
1332496830.116667 260889.000000 225010.000000 4681.129883 2971.870117 7900.040039 4874.080078 2322.429932 3649.120117
|
||||||
1332496830125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
|
1332496830.125000 257944.000000 224923.000000 3291.139893 4357.089844 7131.589844 4385.560059 1077.050049 3664.040039
|
||||||
1332496830133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
|
1332496830.133333 255009.000000 223018.000000 4584.819824 2864.000000 8469.490234 3625.580078 985.557007 3504.229980
|
||||||
1332496830141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
|
1332496830.141667 260114.000000 221947.000000 5676.189941 1210.339966 9393.780273 3390.239990 1654.020020 3018.699951
|
||||||
1332496830150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
|
1332496830.150000 264277.000000 224438.000000 4446.620117 2176.719971 8142.089844 4584.879883 2327.830078 2615.800049
|
||||||
1332496830158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
|
1332496830.158333 259221.000000 226471.000000 2734.439941 4182.759766 6389.549805 5540.520020 1958.880005 2720.120117
|
||||||
1332496830166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
|
1332496830.166667 252650.000000 224831.000000 4163.640137 2989.989990 7179.200195 5213.060059 1929.550049 3457.659912
|
||||||
1332496830175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
|
1332496830.175000 257083.000000 222048.000000 5759.040039 702.440979 8566.549805 3552.020020 1832.939941 3956.189941
|
||||||
1332496830183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
|
1332496830.183333 263130.000000 222967.000000 5141.140137 1166.119995 8666.959961 2720.370117 971.374023 3479.729980
|
||||||
1332496830191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
|
1332496830.191667 260236.000000 225265.000000 3425.139893 3339.080078 7853.609863 3674.949951 525.908020 2443.310059
|
||||||
1332496830200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
|
1332496830.200000 253503.000000 224527.000000 4398.129883 2927.429932 8110.279785 4842.470215 1513.869995 2467.100098
|
||||||
1332496830208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
|
1332496830.208333 256126.000000 222693.000000 6043.529785 656.223999 8797.559570 4832.410156 2832.370117 3426.139893
|
||||||
1332496830216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
|
1332496830.216667 261677.000000 223608.000000 5830.459961 1033.910034 8123.939941 3980.689941 1927.959961 4092.719971
|
||||||
1332496830225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
|
1332496830.225000 259457.000000 225536.000000 4015.570068 2995.989990 7135.439941 3713.550049 307.220001 3849.429932
|
||||||
1332496830233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
|
1332496830.233333 253352.000000 224216.000000 4650.560059 3196.620117 8131.279785 3586.159912 70.832298 3074.179932
|
||||||
1332496830241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
|
1332496830.241667 256124.000000 221513.000000 6100.479980 821.979980 9757.540039 3474.510010 1647.520020 2559.860107
|
||||||
1332496830250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
|
1332496830.250000 263024.000000 221559.000000 5789.959961 699.416992 9129.740234 4153.080078 2829.250000 2677.270020
|
||||||
1332496830258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
|
1332496830.258333 261720.000000 224015.000000 4358.500000 2645.360107 7414.109863 4810.669922 2225.989990 3185.989990
|
||||||
1332496830266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
|
1332496830.266667 254756.000000 224240.000000 4857.379883 3229.679932 7539.310059 4769.140137 1507.130005 3668.260010
|
||||||
1332496830275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
|
1332496830.275000 256889.000000 222658.000000 6473.419922 1214.109985 9010.759766 3848.729980 1303.839966 3778.500000
|
||||||
1332496830283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
|
1332496830.283333 264208.000000 223316.000000 5700.450195 1116.560059 9087.610352 3846.679932 1293.589966 2891.560059
|
||||||
1332496830291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
|
1332496830.291667 263310.000000 225719.000000 3936.120117 3252.360107 7552.850098 4897.859863 1156.630005 2037.160034
|
||||||
1332496830300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
|
1332496830.300000 255079.000000 225086.000000 4536.450195 3960.110107 7454.589844 5479.069824 1596.359985 2190.800049
|
||||||
1332496830308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
|
1332496830.308333 254487.000000 222508.000000 6635.859863 1758.849976 8732.969727 4466.970215 2650.360107 3139.310059
|
||||||
1332496830316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
|
1332496830.316667 261241.000000 222432.000000 6702.270020 1085.130005 8989.230469 3112.989990 1933.560059 3828.409912
|
||||||
1332496830325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
|
1332496830.325000 262119.000000 225587.000000 4714.950195 2892.360107 8107.819824 2961.310059 239.977997 3273.719971
|
||||||
1332496830333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
|
1332496830.333333 254999.000000 226514.000000 4532.089844 4126.899902 8200.129883 3872.590088 56.089001 2370.580078
|
||||||
1332496830341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
|
1332496830.341667 254289.000000 224033.000000 6538.810059 2251.439941 9419.429688 4564.450195 2077.810059 2508.169922
|
||||||
1332496830350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
|
1332496830.350000 261890.000000 221960.000000 6846.089844 1475.270020 9125.589844 4598.290039 3299.219971 3475.419922
|
||||||
1332496830358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
|
1332496830.358333 264502.000000 223085.000000 5066.379883 3270.560059 7933.169922 4173.709961 1908.910034 3867.459961
|
||||||
1332496830366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
|
1332496830.366667 257889.000000 223656.000000 4201.660156 4473.640137 7688.339844 4161.580078 687.578979 3653.689941
|
||||||
1332496830375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
|
1332496830.375000 254270.000000 223151.000000 5715.140137 2752.139893 9273.320312 3772.949951 896.403992 3256.060059
|
||||||
1332496830383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
|
1332496830.383333 258257.000000 224217.000000 6114.310059 1856.859985 9604.320312 4200.490234 1764.380005 2939.219971
|
||||||
1332496830391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
|
1332496830.391667 260020.000000 226868.000000 4237.529785 3605.879883 8066.220215 5430.250000 2138.580078 2696.709961
|
||||||
1332496830400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
|
1332496830.400000 255083.000000 225924.000000 3350.310059 4853.069824 7045.819824 5925.200195 1893.609985 2897.340088
|
||||||
1332496830408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
|
1332496830.408333 254453.000000 222127.000000 5271.330078 2491.500000 8436.679688 5032.080078 2436.050049 3724.590088
|
||||||
1332496830416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
|
1332496830.416667 262588.000000 219950.000000 5994.620117 789.273987 9029.650391 3515.739990 1953.569946 4014.520020
|
||||||
1332496830425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
|
1332496830.425000 265610.000000 223333.000000 4391.410156 2400.959961 8146.459961 3536.959961 530.231995 3133.919922
|
||||||
1332496830433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
|
1332496830.433333 257470.000000 226977.000000 2975.320068 4633.529785 7278.560059 4640.100098 -50.150200 2024.959961
|
||||||
1332496830441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
|
1332496830.441667 250687.000000 226331.000000 4517.859863 3183.800049 8072.600098 5281.660156 1605.140015 2335.139893
|
||||||
1332496830450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
|
1332496830.450000 255563.000000 224495.000000 5551.000000 1101.300049 8461.490234 4725.700195 2726.669922 3480.540039
|
||||||
1332496830458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
|
1332496830.458333 261335.000000 224645.000000 4764.680176 1557.020020 7833.350098 3524.810059 1577.410034 4038.620117
|
||||||
1332496830466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
|
1332496830.466667 260269.000000 224008.000000 3558.030029 2987.610107 7362.439941 3279.229980 562.442017 3786.550049
|
||||||
1332496830475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
|
1332496830.475000 257435.000000 221777.000000 4972.600098 2166.879883 8481.440430 3328.719971 1037.130005 3271.370117
|
||||||
1332496830483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
|
1332496830.483333 261046.000000 221550.000000 5816.180176 590.216980 9120.929688 3895.399902 2382.669922 2824.169922
|
||||||
1332496830491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
|
1332496830.491667 262766.000000 224473.000000 4835.049805 1785.770020 7880.759766 4745.620117 2443.659912 3229.550049
|
||||||
1332496830500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
|
1332496830.500000 256509.000000 226413.000000 3758.870117 3461.199951 6743.770020 4928.959961 1536.619995 3546.689941
|
||||||
1332496830508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
|
1332496830.508333 250793.000000 224372.000000 5218.490234 2865.260010 7803.959961 4351.089844 1333.819946 3680.489990
|
||||||
1332496830516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
|
1332496830.516667 256319.000000 222066.000000 6403.970215 732.344971 9627.759766 3089.300049 1516.780029 3653.689941
|
||||||
1332496830525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
|
1332496830.525000 263343.000000 223235.000000 5200.430176 1388.579956 9372.849609 3371.229980 1450.390015 2678.909912
|
||||||
1332496830533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
|
1332496830.533333 260903.000000 225110.000000 3722.580078 3246.659912 7876.540039 4716.810059 1498.439941 2116.520020
|
||||||
1332496830541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
|
1332496830.541667 254416.000000 223769.000000 4841.649902 2956.399902 8115.919922 5392.359863 2142.810059 2652.320068
|
||||||
1332496830550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
|
1332496830.550000 256698.000000 222172.000000 6471.229980 970.395996 8834.980469 4816.839844 2376.629883 3605.860107
|
||||||
1332496830558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
|
1332496830.558333 261841.000000 223537.000000 5500.740234 1189.660034 8365.730469 4016.469971 1042.270020 3821.199951
|
||||||
1332496830566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
|
1332496830.566667 259503.000000 225840.000000 3827.929932 3088.840088 7676.140137 3978.310059 -357.006989 3016.419922
|
||||||
1332496830575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
|
1332496830.575000 253457.000000 224636.000000 4914.609863 3097.449951 8224.900391 4321.439941 171.373993 2412.360107
|
||||||
1332496830583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
|
1332496830.583333 256029.000000 222221.000000 6841.799805 1028.500000 9252.299805 4387.569824 2418.139893 2510.100098
|
||||||
1332496830591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
|
1332496830.591667 262840.000000 222550.000000 6210.250000 1410.729980 8538.900391 4152.580078 3009.300049 3219.760010
|
||||||
1332496830600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
|
1332496830.600000 261633.000000 225065.000000 4284.529785 3357.209961 7282.169922 3823.590088 1402.839966 3644.669922
|
||||||
1332496830608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
|
1332496830.608333 254591.000000 225109.000000 4693.160156 3647.739990 7745.160156 3686.379883 490.161011 3448.860107
|
||||||
1332496830616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
|
1332496830.616667 254780.000000 223599.000000 6527.379883 1569.869995 9438.429688 3456.580078 1162.520020 3252.010010
|
||||||
1332496830625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
|
1332496830.625000 260639.000000 224107.000000 6531.049805 1633.050049 9283.719727 4174.020020 2089.550049 2775.750000
|
||||||
1332496830633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
|
1332496830.633333 261108.000000 225472.000000 4968.259766 3527.850098 7692.870117 5137.100098 2207.389893 2436.659912
|
||||||
1332496830641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
|
1332496830.641667 255775.000000 223708.000000 4963.450195 4017.370117 7701.419922 5269.649902 2284.399902 2842.080078
|
||||||
1332496830650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
|
1332496830.650000 257398.000000 220947.000000 6767.500000 1645.709961 9107.070312 4000.179932 2548.860107 3624.770020
|
||||||
1332496830658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
|
1332496830.658333 264924.000000 221559.000000 6471.459961 1110.329956 9459.650391 3108.169922 1696.969971 3893.439941
|
||||||
1332496830666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
|
1332496830.666667 265339.000000 225733.000000 4348.799805 3459.510010 8475.299805 4031.239990 573.346985 2910.270020
|
||||||
1332496830675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
|
1332496830.675000 256814.000000 226995.000000 3479.540039 4949.790039 7499.910156 5624.709961 751.656006 2347.709961
|
||||||
1332496830683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
|
1332496830.683333 253316.000000 225161.000000 5147.060059 3218.429932 8460.160156 5869.299805 2336.320068 2987.959961
|
||||||
1332496830691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
|
1332496830.691667 259360.000000 223101.000000 5549.120117 1869.949951 8740.759766 4668.939941 2457.909912 3758.820068
|
||||||
1332496830700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
|
1332496830.700000 262012.000000 224016.000000 4173.609863 3004.129883 8157.040039 3704.729980 987.963989 3652.750000
|
||||||
1332496830708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
|
1332496830.708333 257176.000000 224420.000000 3517.300049 4118.750000 7822.240234 3718.229980 37.264900 2953.679932
|
||||||
1332496830716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
|
1332496830.716667 255146.000000 223322.000000 4923.979980 2330.679932 9095.910156 3792.399902 1013.070007 2711.239990
|
||||||
1332496830725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
|
1332496830.725000 260524.000000 223651.000000 5413.629883 1146.209961 8817.169922 4419.649902 2446.649902 2832.050049
|
||||||
1332496830733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
|
1332496830.733333 262098.000000 225752.000000 4262.979980 2270.969971 7135.479980 5067.120117 2294.679932 3376.620117
|
||||||
1332496830741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
|
1332496830.741667 256889.000000 225379.000000 3606.459961 3568.189941 6552.649902 4970.270020 1516.380005 3662.570068
|
||||||
1332496830750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
|
1332496830.750000 253948.000000 222631.000000 5511.700195 2066.300049 7952.660156 4019.909912 1513.140015 3752.629883
|
||||||
1332496830758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
|
1332496830.758333 259799.000000 222067.000000 5873.500000 608.583984 9253.780273 2870.739990 1348.239990 3344.199951
|
||||||
1332496830766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
|
1332496830.766667 262547.000000 224901.000000 4346.080078 1928.099976 8590.969727 3455.459961 904.390991 2379.270020
|
||||||
1332496830775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
|
1332496830.775000 256137.000000 226761.000000 3423.560059 3379.080078 7471.149902 4894.169922 1153.540039 2031.410034
|
||||||
1332496830783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
|
1332496830.783333 250326.000000 225013.000000 5519.979980 2423.969971 7991.759766 5117.950195 2098.790039 3099.239990
|
||||||
1332496830791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
|
1332496830.791667 255454.000000 222992.000000 6547.950195 496.496002 8751.339844 3900.560059 2132.290039 4076.810059
|
||||||
1332496830800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
|
1332496830.800000 261286.000000 223489.000000 5152.850098 1501.510010 8425.610352 2888.030029 776.114014 3786.360107
|
||||||
1332496830808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
|
1332496830.808333 258969.000000 224069.000000 3832.610107 3001.979980 7979.259766 3182.310059 52.716000 2874.800049
|
||||||
1332496830816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
|
1332496830.816667 254946.000000 222035.000000 5317.879883 2139.800049 9103.139648 3955.610107 1235.170044 2394.149902
|
||||||
1332496830825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
|
1332496830.825000 258676.000000 221205.000000 6594.910156 505.343994 9423.360352 4562.470215 2913.739990 2892.350098
|
||||||
1332496830833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
|
1332496830.833333 262125.000000 223566.000000 5116.750000 1773.599976 8082.200195 4776.370117 2386.389893 3659.729980
|
||||||
1332496830841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
|
1332496830.841667 257835.000000 225918.000000 3714.300049 3477.080078 7205.370117 4554.609863 711.539001 3878.419922
|
||||||
1332496830850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
|
1332496830.850000 253660.000000 224371.000000 5022.450195 2592.429932 8277.200195 4119.370117 486.507996 3666.739990
|
||||||
1332496830858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
|
1332496830.858333 259503.000000 222061.000000 6589.950195 659.935974 9596.919922 3598.100098 1702.489990 3036.600098
|
||||||
1332496830866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
|
1332496830.866667 265495.000000 222843.000000 5541.850098 1728.430054 8459.959961 4492.000000 2231.969971 2430.620117
|
||||||
1332496830875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
|
1332496830.875000 260929.000000 224996.000000 4000.949951 3745.989990 6983.790039 5430.859863 1855.260010 2533.379883
|
||||||
1332496830883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
|
1332496830.883333 252716.000000 224335.000000 5086.560059 3401.149902 7597.970215 5196.120117 1755.719971 3079.760010
|
||||||
1332496830891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
|
1332496830.891667 254110.000000 223111.000000 6822.189941 1229.079956 9164.339844 3761.229980 1679.390015 3584.879883
|
||||||
1332496830900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
|
1332496830.900000 259969.000000 224693.000000 6183.950195 1538.500000 9222.080078 3139.169922 949.901978 3180.800049
|
||||||
1332496830908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
|
1332496830.908333 259078.000000 226913.000000 4388.890137 3694.820068 8195.019531 3933.000000 426.079987 2388.449951
|
||||||
1332496830916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
|
1332496830.916667 254563.000000 224760.000000 5168.439941 4020.939941 8450.269531 4758.910156 1458.900024 2286.429932
|
||||||
1332496830925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
|
1332496830.925000 258059.000000 221217.000000 6883.459961 1649.530029 9232.780273 4457.649902 3057.820068 3031.949951
|
||||||
1332496830933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
|
1332496830.933333 264667.000000 221177.000000 6218.509766 1645.729980 8657.179688 3663.500000 2528.280029 3978.340088
|
||||||
1332496830941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
|
1332496830.941667 262925.000000 224382.000000 4627.500000 3635.929932 7892.799805 3431.320068 604.508972 3901.370117
|
||||||
1332496830950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
|
1332496830.950000 254708.000000 225448.000000 4408.250000 4461.040039 8197.169922 3953.750000 -44.534599 3154.870117
|
||||||
1332496830958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
|
1332496830.958333 253702.000000 224635.000000 5825.770020 2577.050049 9590.049805 4569.250000 1460.270020 2785.169922
|
||||||
1332496830966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
|
1332496830.966667 260206.000000 224140.000000 5387.979980 1951.160034 8789.509766 5131.660156 2706.379883 2972.479980
|
||||||
1332496830975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
|
1332496830.975000 261240.000000 224737.000000 3860.810059 3418.310059 7414.529785 5284.520020 2271.379883 3183.149902
|
||||||
1332496830983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
|
1332496830.983333 256140.000000 223252.000000 3850.010010 3957.139893 7262.649902 4964.640137 1499.510010 3453.129883
|
||||||
1332496830991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03
|
1332496830.991667 256116.000000 221349.000000 5594.479980 2054.399902 8835.129883 3662.010010 1485.510010 3613.010010
|
||||||
|
|
|
@ -1,119 +1,119 @@
|
||||||
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
|
1332496830.008333 259567.000000 222698.000000 6207.600098 678.671997 9380.230469 4575.580078 2830.610107 2688.629883
|
||||||
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
|
1332496830.016667 263073.000000 223304.000000 4961.640137 2197.120117 7687.310059 4861.859863 2732.780029 3008.540039
|
||||||
1332496830025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
|
1332496830.025000 257614.000000 223323.000000 5003.660156 3525.139893 7165.310059 4685.620117 1715.380005 3440.479980
|
||||||
1332496830033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
|
1332496830.033333 255780.000000 221915.000000 6357.310059 2145.290039 8426.969727 3775.350098 1475.390015 3797.239990
|
||||||
1332496830041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
|
1332496830.041667 260166.000000 223008.000000 6702.589844 1484.959961 9288.099609 3330.830078 1228.500000 3214.320068
|
||||||
1332496830050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
|
1332496830.050000 261231.000000 226426.000000 4980.060059 2982.379883 8499.629883 4267.669922 994.088989 2292.889893
|
||||||
1332496830058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
|
1332496830.058333 255117.000000 226642.000000 4584.410156 4656.439941 7860.149902 5317.310059 1473.599976 2111.689941
|
||||||
1332496830066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
|
1332496830.066667 253300.000000 223554.000000 6455.089844 3036.649902 8869.750000 4986.310059 2607.360107 2839.590088
|
||||||
1332496830075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
|
1332496830.075000 261061.000000 221263.000000 6951.979980 1500.239990 9386.099609 3791.679932 2677.010010 3980.629883
|
||||||
1332496830083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
|
1332496830.083333 266503.000000 223198.000000 5189.609863 2594.560059 8571.530273 3175.000000 919.840027 3792.010010
|
||||||
1332496830091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
|
1332496830.091667 260692.000000 225184.000000 3782.479980 4642.879883 7662.959961 3917.790039 -251.097000 2907.060059
|
||||||
1332496830100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
|
1332496830.100000 253963.000000 225081.000000 5123.529785 3839.550049 8669.030273 4877.819824 943.723999 2527.449951
|
||||||
1332496830108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
|
1332496830.108333 256555.000000 224169.000000 5930.600098 2298.540039 8906.709961 5331.680176 2549.909912 3053.560059
|
||||||
1332496830116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
|
1332496830.116667 260889.000000 225010.000000 4681.129883 2971.870117 7900.040039 4874.080078 2322.429932 3649.120117
|
||||||
1332496830125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
|
1332496830.125000 257944.000000 224923.000000 3291.139893 4357.089844 7131.589844 4385.560059 1077.050049 3664.040039
|
||||||
1332496830133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
|
1332496830.133333 255009.000000 223018.000000 4584.819824 2864.000000 8469.490234 3625.580078 985.557007 3504.229980
|
||||||
1332496830141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
|
1332496830.141667 260114.000000 221947.000000 5676.189941 1210.339966 9393.780273 3390.239990 1654.020020 3018.699951
|
||||||
1332496830150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
|
1332496830.150000 264277.000000 224438.000000 4446.620117 2176.719971 8142.089844 4584.879883 2327.830078 2615.800049
|
||||||
1332496830158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
|
1332496830.158333 259221.000000 226471.000000 2734.439941 4182.759766 6389.549805 5540.520020 1958.880005 2720.120117
|
||||||
1332496830166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
|
1332496830.166667 252650.000000 224831.000000 4163.640137 2989.989990 7179.200195 5213.060059 1929.550049 3457.659912
|
||||||
1332496830175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
|
1332496830.175000 257083.000000 222048.000000 5759.040039 702.440979 8566.549805 3552.020020 1832.939941 3956.189941
|
||||||
1332496830183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
|
1332496830.183333 263130.000000 222967.000000 5141.140137 1166.119995 8666.959961 2720.370117 971.374023 3479.729980
|
||||||
1332496830191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
|
1332496830.191667 260236.000000 225265.000000 3425.139893 3339.080078 7853.609863 3674.949951 525.908020 2443.310059
|
||||||
1332496830200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
|
1332496830.200000 253503.000000 224527.000000 4398.129883 2927.429932 8110.279785 4842.470215 1513.869995 2467.100098
|
||||||
1332496830208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
|
1332496830.208333 256126.000000 222693.000000 6043.529785 656.223999 8797.559570 4832.410156 2832.370117 3426.139893
|
||||||
1332496830216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
|
1332496830.216667 261677.000000 223608.000000 5830.459961 1033.910034 8123.939941 3980.689941 1927.959961 4092.719971
|
||||||
1332496830225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
|
1332496830.225000 259457.000000 225536.000000 4015.570068 2995.989990 7135.439941 3713.550049 307.220001 3849.429932
|
||||||
1332496830233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
|
1332496830.233333 253352.000000 224216.000000 4650.560059 3196.620117 8131.279785 3586.159912 70.832298 3074.179932
|
||||||
1332496830241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
|
1332496830.241667 256124.000000 221513.000000 6100.479980 821.979980 9757.540039 3474.510010 1647.520020 2559.860107
|
||||||
1332496830250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
|
1332496830.250000 263024.000000 221559.000000 5789.959961 699.416992 9129.740234 4153.080078 2829.250000 2677.270020
|
||||||
1332496830258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
|
1332496830.258333 261720.000000 224015.000000 4358.500000 2645.360107 7414.109863 4810.669922 2225.989990 3185.989990
|
||||||
1332496830266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
|
1332496830.266667 254756.000000 224240.000000 4857.379883 3229.679932 7539.310059 4769.140137 1507.130005 3668.260010
|
||||||
1332496830275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
|
1332496830.275000 256889.000000 222658.000000 6473.419922 1214.109985 9010.759766 3848.729980 1303.839966 3778.500000
|
||||||
1332496830283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
|
1332496830.283333 264208.000000 223316.000000 5700.450195 1116.560059 9087.610352 3846.679932 1293.589966 2891.560059
|
||||||
1332496830291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
|
1332496830.291667 263310.000000 225719.000000 3936.120117 3252.360107 7552.850098 4897.859863 1156.630005 2037.160034
|
||||||
1332496830300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
|
1332496830.300000 255079.000000 225086.000000 4536.450195 3960.110107 7454.589844 5479.069824 1596.359985 2190.800049
|
||||||
1332496830308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
|
1332496830.308333 254487.000000 222508.000000 6635.859863 1758.849976 8732.969727 4466.970215 2650.360107 3139.310059
|
||||||
1332496830316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
|
1332496830.316667 261241.000000 222432.000000 6702.270020 1085.130005 8989.230469 3112.989990 1933.560059 3828.409912
|
||||||
1332496830325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
|
1332496830.325000 262119.000000 225587.000000 4714.950195 2892.360107 8107.819824 2961.310059 239.977997 3273.719971
|
||||||
1332496830333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
|
1332496830.333333 254999.000000 226514.000000 4532.089844 4126.899902 8200.129883 3872.590088 56.089001 2370.580078
|
||||||
1332496830341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
|
1332496830.341667 254289.000000 224033.000000 6538.810059 2251.439941 9419.429688 4564.450195 2077.810059 2508.169922
|
||||||
1332496830350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
|
1332496830.350000 261890.000000 221960.000000 6846.089844 1475.270020 9125.589844 4598.290039 3299.219971 3475.419922
|
||||||
1332496830358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
|
1332496830.358333 264502.000000 223085.000000 5066.379883 3270.560059 7933.169922 4173.709961 1908.910034 3867.459961
|
||||||
1332496830366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
|
1332496830.366667 257889.000000 223656.000000 4201.660156 4473.640137 7688.339844 4161.580078 687.578979 3653.689941
|
||||||
1332496830375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
|
1332496830.375000 254270.000000 223151.000000 5715.140137 2752.139893 9273.320312 3772.949951 896.403992 3256.060059
|
||||||
1332496830383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
|
1332496830.383333 258257.000000 224217.000000 6114.310059 1856.859985 9604.320312 4200.490234 1764.380005 2939.219971
|
||||||
1332496830391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
|
1332496830.391667 260020.000000 226868.000000 4237.529785 3605.879883 8066.220215 5430.250000 2138.580078 2696.709961
|
||||||
1332496830400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
|
1332496830.400000 255083.000000 225924.000000 3350.310059 4853.069824 7045.819824 5925.200195 1893.609985 2897.340088
|
||||||
1332496830408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
|
1332496830.408333 254453.000000 222127.000000 5271.330078 2491.500000 8436.679688 5032.080078 2436.050049 3724.590088
|
||||||
1332496830416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
|
1332496830.416667 262588.000000 219950.000000 5994.620117 789.273987 9029.650391 3515.739990 1953.569946 4014.520020
|
||||||
1332496830425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
|
1332496830.425000 265610.000000 223333.000000 4391.410156 2400.959961 8146.459961 3536.959961 530.231995 3133.919922
|
||||||
1332496830433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
|
1332496830.433333 257470.000000 226977.000000 2975.320068 4633.529785 7278.560059 4640.100098 -50.150200 2024.959961
|
||||||
1332496830441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
|
1332496830.441667 250687.000000 226331.000000 4517.859863 3183.800049 8072.600098 5281.660156 1605.140015 2335.139893
|
||||||
1332496830450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
|
1332496830.450000 255563.000000 224495.000000 5551.000000 1101.300049 8461.490234 4725.700195 2726.669922 3480.540039
|
||||||
1332496830458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
|
1332496830.458333 261335.000000 224645.000000 4764.680176 1557.020020 7833.350098 3524.810059 1577.410034 4038.620117
|
||||||
1332496830466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
|
1332496830.466667 260269.000000 224008.000000 3558.030029 2987.610107 7362.439941 3279.229980 562.442017 3786.550049
|
||||||
1332496830475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
|
1332496830.475000 257435.000000 221777.000000 4972.600098 2166.879883 8481.440430 3328.719971 1037.130005 3271.370117
|
||||||
1332496830483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
|
1332496830.483333 261046.000000 221550.000000 5816.180176 590.216980 9120.929688 3895.399902 2382.669922 2824.169922
|
||||||
1332496830491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
|
1332496830.491667 262766.000000 224473.000000 4835.049805 1785.770020 7880.759766 4745.620117 2443.659912 3229.550049
|
||||||
1332496830500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
|
1332496830.500000 256509.000000 226413.000000 3758.870117 3461.199951 6743.770020 4928.959961 1536.619995 3546.689941
|
||||||
1332496830508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
|
1332496830.508333 250793.000000 224372.000000 5218.490234 2865.260010 7803.959961 4351.089844 1333.819946 3680.489990
|
||||||
1332496830516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
|
1332496830.516667 256319.000000 222066.000000 6403.970215 732.344971 9627.759766 3089.300049 1516.780029 3653.689941
|
||||||
1332496830525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
|
1332496830.525000 263343.000000 223235.000000 5200.430176 1388.579956 9372.849609 3371.229980 1450.390015 2678.909912
|
||||||
1332496830533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
|
1332496830.533333 260903.000000 225110.000000 3722.580078 3246.659912 7876.540039 4716.810059 1498.439941 2116.520020
|
||||||
1332496830541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
|
1332496830.541667 254416.000000 223769.000000 4841.649902 2956.399902 8115.919922 5392.359863 2142.810059 2652.320068
|
||||||
1332496830550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
|
1332496830.550000 256698.000000 222172.000000 6471.229980 970.395996 8834.980469 4816.839844 2376.629883 3605.860107
|
||||||
1332496830558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
|
1332496830.558333 261841.000000 223537.000000 5500.740234 1189.660034 8365.730469 4016.469971 1042.270020 3821.199951
|
||||||
1332496830566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
|
1332496830.566667 259503.000000 225840.000000 3827.929932 3088.840088 7676.140137 3978.310059 -357.006989 3016.419922
|
||||||
1332496830575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
|
1332496830.575000 253457.000000 224636.000000 4914.609863 3097.449951 8224.900391 4321.439941 171.373993 2412.360107
|
||||||
1332496830583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
|
1332496830.583333 256029.000000 222221.000000 6841.799805 1028.500000 9252.299805 4387.569824 2418.139893 2510.100098
|
||||||
1332496830591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
|
1332496830.591667 262840.000000 222550.000000 6210.250000 1410.729980 8538.900391 4152.580078 3009.300049 3219.760010
|
||||||
1332496830600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
|
1332496830.600000 261633.000000 225065.000000 4284.529785 3357.209961 7282.169922 3823.590088 1402.839966 3644.669922
|
||||||
1332496830608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
|
1332496830.608333 254591.000000 225109.000000 4693.160156 3647.739990 7745.160156 3686.379883 490.161011 3448.860107
|
||||||
1332496830616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
|
1332496830.616667 254780.000000 223599.000000 6527.379883 1569.869995 9438.429688 3456.580078 1162.520020 3252.010010
|
||||||
1332496830625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
|
1332496830.625000 260639.000000 224107.000000 6531.049805 1633.050049 9283.719727 4174.020020 2089.550049 2775.750000
|
||||||
1332496830633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
|
1332496830.633333 261108.000000 225472.000000 4968.259766 3527.850098 7692.870117 5137.100098 2207.389893 2436.659912
|
||||||
1332496830641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
|
1332496830.641667 255775.000000 223708.000000 4963.450195 4017.370117 7701.419922 5269.649902 2284.399902 2842.080078
|
||||||
1332496830650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
|
1332496830.650000 257398.000000 220947.000000 6767.500000 1645.709961 9107.070312 4000.179932 2548.860107 3624.770020
|
||||||
1332496830658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
|
1332496830.658333 264924.000000 221559.000000 6471.459961 1110.329956 9459.650391 3108.169922 1696.969971 3893.439941
|
||||||
1332496830666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
|
1332496830.666667 265339.000000 225733.000000 4348.799805 3459.510010 8475.299805 4031.239990 573.346985 2910.270020
|
||||||
1332496830675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
|
1332496830.675000 256814.000000 226995.000000 3479.540039 4949.790039 7499.910156 5624.709961 751.656006 2347.709961
|
||||||
1332496830683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
|
1332496830.683333 253316.000000 225161.000000 5147.060059 3218.429932 8460.160156 5869.299805 2336.320068 2987.959961
|
||||||
1332496830691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
|
1332496830.691667 259360.000000 223101.000000 5549.120117 1869.949951 8740.759766 4668.939941 2457.909912 3758.820068
|
||||||
1332496830700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
|
1332496830.700000 262012.000000 224016.000000 4173.609863 3004.129883 8157.040039 3704.729980 987.963989 3652.750000
|
||||||
1332496830708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
|
1332496830.708333 257176.000000 224420.000000 3517.300049 4118.750000 7822.240234 3718.229980 37.264900 2953.679932
|
||||||
1332496830716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
|
1332496830.716667 255146.000000 223322.000000 4923.979980 2330.679932 9095.910156 3792.399902 1013.070007 2711.239990
|
||||||
1332496830725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
|
1332496830.725000 260524.000000 223651.000000 5413.629883 1146.209961 8817.169922 4419.649902 2446.649902 2832.050049
|
||||||
1332496830733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
|
1332496830.733333 262098.000000 225752.000000 4262.979980 2270.969971 7135.479980 5067.120117 2294.679932 3376.620117
|
||||||
1332496830741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
|
1332496830.741667 256889.000000 225379.000000 3606.459961 3568.189941 6552.649902 4970.270020 1516.380005 3662.570068
|
||||||
1332496830750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
|
1332496830.750000 253948.000000 222631.000000 5511.700195 2066.300049 7952.660156 4019.909912 1513.140015 3752.629883
|
||||||
1332496830758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
|
1332496830.758333 259799.000000 222067.000000 5873.500000 608.583984 9253.780273 2870.739990 1348.239990 3344.199951
|
||||||
1332496830766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
|
1332496830.766667 262547.000000 224901.000000 4346.080078 1928.099976 8590.969727 3455.459961 904.390991 2379.270020
|
||||||
1332496830775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
|
1332496830.775000 256137.000000 226761.000000 3423.560059 3379.080078 7471.149902 4894.169922 1153.540039 2031.410034
|
||||||
1332496830783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
|
1332496830.783333 250326.000000 225013.000000 5519.979980 2423.969971 7991.759766 5117.950195 2098.790039 3099.239990
|
||||||
1332496830791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
|
1332496830.791667 255454.000000 222992.000000 6547.950195 496.496002 8751.339844 3900.560059 2132.290039 4076.810059
|
||||||
1332496830800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
|
1332496830.800000 261286.000000 223489.000000 5152.850098 1501.510010 8425.610352 2888.030029 776.114014 3786.360107
|
||||||
1332496830808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
|
1332496830.808333 258969.000000 224069.000000 3832.610107 3001.979980 7979.259766 3182.310059 52.716000 2874.800049
|
||||||
1332496830816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
|
1332496830.816667 254946.000000 222035.000000 5317.879883 2139.800049 9103.139648 3955.610107 1235.170044 2394.149902
|
||||||
1332496830825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
|
1332496830.825000 258676.000000 221205.000000 6594.910156 505.343994 9423.360352 4562.470215 2913.739990 2892.350098
|
||||||
1332496830833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
|
1332496830.833333 262125.000000 223566.000000 5116.750000 1773.599976 8082.200195 4776.370117 2386.389893 3659.729980
|
||||||
1332496830841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
|
1332496830.841667 257835.000000 225918.000000 3714.300049 3477.080078 7205.370117 4554.609863 711.539001 3878.419922
|
||||||
1332496830850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
|
1332496830.850000 253660.000000 224371.000000 5022.450195 2592.429932 8277.200195 4119.370117 486.507996 3666.739990
|
||||||
1332496830858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
|
1332496830.858333 259503.000000 222061.000000 6589.950195 659.935974 9596.919922 3598.100098 1702.489990 3036.600098
|
||||||
1332496830866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
|
1332496830.866667 265495.000000 222843.000000 5541.850098 1728.430054 8459.959961 4492.000000 2231.969971 2430.620117
|
||||||
1332496830875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
|
1332496830.875000 260929.000000 224996.000000 4000.949951 3745.989990 6983.790039 5430.859863 1855.260010 2533.379883
|
||||||
1332496830883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
|
1332496830.883333 252716.000000 224335.000000 5086.560059 3401.149902 7597.970215 5196.120117 1755.719971 3079.760010
|
||||||
1332496830891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
|
1332496830.891667 254110.000000 223111.000000 6822.189941 1229.079956 9164.339844 3761.229980 1679.390015 3584.879883
|
||||||
1332496830900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
|
1332496830.900000 259969.000000 224693.000000 6183.950195 1538.500000 9222.080078 3139.169922 949.901978 3180.800049
|
||||||
1332496830908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
|
1332496830.908333 259078.000000 226913.000000 4388.890137 3694.820068 8195.019531 3933.000000 426.079987 2388.449951
|
||||||
1332496830916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
|
1332496830.916667 254563.000000 224760.000000 5168.439941 4020.939941 8450.269531 4758.910156 1458.900024 2286.429932
|
||||||
1332496830925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
|
1332496830.925000 258059.000000 221217.000000 6883.459961 1649.530029 9232.780273 4457.649902 3057.820068 3031.949951
|
||||||
1332496830933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
|
1332496830.933333 264667.000000 221177.000000 6218.509766 1645.729980 8657.179688 3663.500000 2528.280029 3978.340088
|
||||||
1332496830941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
|
1332496830.941667 262925.000000 224382.000000 4627.500000 3635.929932 7892.799805 3431.320068 604.508972 3901.370117
|
||||||
1332496830950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
|
1332496830.950000 254708.000000 225448.000000 4408.250000 4461.040039 8197.169922 3953.750000 -44.534599 3154.870117
|
||||||
1332496830958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
|
1332496830.958333 253702.000000 224635.000000 5825.770020 2577.050049 9590.049805 4569.250000 1460.270020 2785.169922
|
||||||
1332496830966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
|
1332496830.966667 260206.000000 224140.000000 5387.979980 1951.160034 8789.509766 5131.660156 2706.379883 2972.479980
|
||||||
1332496830975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
|
1332496830.975000 261240.000000 224737.000000 3860.810059 3418.310059 7414.529785 5284.520020 2271.379883 3183.149902
|
||||||
1332496830983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
|
1332496830.983333 256140.000000 223252.000000 3850.010010 3957.139893 7262.649902 4964.640137 1499.510010 3453.129883
|
||||||
1332496830991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03
|
1332496830.991667 256116.000000 221349.000000 5594.479980 2054.399902 8835.129883 3662.010010 1485.510010 3613.010010
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
|
1332496830.008333 259567.000000 222698.000000 6207.600098 678.671997 9380.230469 4575.580078 2830.610107 2688.629883
|
||||||
|
|
|
@ -1,2 +1,2 @@
|
||||||
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
|
1332496830.008333 259567.000000 222698.000000 6207.600098 678.671997 9380.230469 4575.580078 2830.610107 2688.629883
|
||||||
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
|
1332496830.016667 263073.000000 223304.000000 4961.640137 2197.120117 7687.310059 4861.859863 2732.780029 3008.540039
|
||||||
|
|
|
@ -1,124 +1,124 @@
|
||||||
# path: /newton/prep
|
# path: /newton/prep
|
||||||
# layout: float32_8
|
# layout: PrepData
|
||||||
# start: Fri, 23 Mar 2012 10:00:30.000000 +0000
|
# start: Fri, 23 Mar 2012 10:00:30.000000 +0000
|
||||||
# end: Fri, 23 Mar 2012 10:00:31.000000 +0000
|
# end: Fri, 23 Mar 2012 10:00:31.000000 +0000
|
||||||
2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
|
251774.000000 224241.000000 5688.100098 1915.530029 9329.219727 4183.709961 1212.349976 2641.790039
|
||||||
2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
|
259567.000000 222698.000000 6207.600098 678.671997 9380.230469 4575.580078 2830.610107 2688.629883
|
||||||
2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
|
263073.000000 223304.000000 4961.640137 2197.120117 7687.310059 4861.859863 2732.780029 3008.540039
|
||||||
2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
|
257614.000000 223323.000000 5003.660156 3525.139893 7165.310059 4685.620117 1715.380005 3440.479980
|
||||||
2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
|
255780.000000 221915.000000 6357.310059 2145.290039 8426.969727 3775.350098 1475.390015 3797.239990
|
||||||
2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
|
260166.000000 223008.000000 6702.589844 1484.959961 9288.099609 3330.830078 1228.500000 3214.320068
|
||||||
2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
|
261231.000000 226426.000000 4980.060059 2982.379883 8499.629883 4267.669922 994.088989 2292.889893
|
||||||
2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
|
255117.000000 226642.000000 4584.410156 4656.439941 7860.149902 5317.310059 1473.599976 2111.689941
|
||||||
2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
|
253300.000000 223554.000000 6455.089844 3036.649902 8869.750000 4986.310059 2607.360107 2839.590088
|
||||||
2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
|
261061.000000 221263.000000 6951.979980 1500.239990 9386.099609 3791.679932 2677.010010 3980.629883
|
||||||
2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
|
266503.000000 223198.000000 5189.609863 2594.560059 8571.530273 3175.000000 919.840027 3792.010010
|
||||||
2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
|
260692.000000 225184.000000 3782.479980 4642.879883 7662.959961 3917.790039 -251.097000 2907.060059
|
||||||
2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
|
253963.000000 225081.000000 5123.529785 3839.550049 8669.030273 4877.819824 943.723999 2527.449951
|
||||||
2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
|
256555.000000 224169.000000 5930.600098 2298.540039 8906.709961 5331.680176 2549.909912 3053.560059
|
||||||
2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
|
260889.000000 225010.000000 4681.129883 2971.870117 7900.040039 4874.080078 2322.429932 3649.120117
|
||||||
2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
|
257944.000000 224923.000000 3291.139893 4357.089844 7131.589844 4385.560059 1077.050049 3664.040039
|
||||||
2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
|
255009.000000 223018.000000 4584.819824 2864.000000 8469.490234 3625.580078 985.557007 3504.229980
|
||||||
2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
|
260114.000000 221947.000000 5676.189941 1210.339966 9393.780273 3390.239990 1654.020020 3018.699951
|
||||||
2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
|
264277.000000 224438.000000 4446.620117 2176.719971 8142.089844 4584.879883 2327.830078 2615.800049
|
||||||
2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
|
259221.000000 226471.000000 2734.439941 4182.759766 6389.549805 5540.520020 1958.880005 2720.120117
|
||||||
2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
|
252650.000000 224831.000000 4163.640137 2989.989990 7179.200195 5213.060059 1929.550049 3457.659912
|
||||||
2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
|
257083.000000 222048.000000 5759.040039 702.440979 8566.549805 3552.020020 1832.939941 3956.189941
|
||||||
2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
|
263130.000000 222967.000000 5141.140137 1166.119995 8666.959961 2720.370117 971.374023 3479.729980
|
||||||
2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
|
260236.000000 225265.000000 3425.139893 3339.080078 7853.609863 3674.949951 525.908020 2443.310059
|
||||||
2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
|
253503.000000 224527.000000 4398.129883 2927.429932 8110.279785 4842.470215 1513.869995 2467.100098
|
||||||
2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
|
256126.000000 222693.000000 6043.529785 656.223999 8797.559570 4832.410156 2832.370117 3426.139893
|
||||||
2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
|
261677.000000 223608.000000 5830.459961 1033.910034 8123.939941 3980.689941 1927.959961 4092.719971
|
||||||
2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
|
259457.000000 225536.000000 4015.570068 2995.989990 7135.439941 3713.550049 307.220001 3849.429932
|
||||||
2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
|
253352.000000 224216.000000 4650.560059 3196.620117 8131.279785 3586.159912 70.832298 3074.179932
|
||||||
2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
|
256124.000000 221513.000000 6100.479980 821.979980 9757.540039 3474.510010 1647.520020 2559.860107
|
||||||
2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
|
263024.000000 221559.000000 5789.959961 699.416992 9129.740234 4153.080078 2829.250000 2677.270020
|
||||||
2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
|
261720.000000 224015.000000 4358.500000 2645.360107 7414.109863 4810.669922 2225.989990 3185.989990
|
||||||
2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
|
254756.000000 224240.000000 4857.379883 3229.679932 7539.310059 4769.140137 1507.130005 3668.260010
|
||||||
2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
|
256889.000000 222658.000000 6473.419922 1214.109985 9010.759766 3848.729980 1303.839966 3778.500000
|
||||||
2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
|
264208.000000 223316.000000 5700.450195 1116.560059 9087.610352 3846.679932 1293.589966 2891.560059
|
||||||
2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
|
263310.000000 225719.000000 3936.120117 3252.360107 7552.850098 4897.859863 1156.630005 2037.160034
|
||||||
2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
|
255079.000000 225086.000000 4536.450195 3960.110107 7454.589844 5479.069824 1596.359985 2190.800049
|
||||||
2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
|
254487.000000 222508.000000 6635.859863 1758.849976 8732.969727 4466.970215 2650.360107 3139.310059
|
||||||
2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
|
261241.000000 222432.000000 6702.270020 1085.130005 8989.230469 3112.989990 1933.560059 3828.409912
|
||||||
2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
|
262119.000000 225587.000000 4714.950195 2892.360107 8107.819824 2961.310059 239.977997 3273.719971
|
||||||
2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
|
254999.000000 226514.000000 4532.089844 4126.899902 8200.129883 3872.590088 56.089001 2370.580078
|
||||||
2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
|
254289.000000 224033.000000 6538.810059 2251.439941 9419.429688 4564.450195 2077.810059 2508.169922
|
||||||
2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
|
261890.000000 221960.000000 6846.089844 1475.270020 9125.589844 4598.290039 3299.219971 3475.419922
|
||||||
2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
|
264502.000000 223085.000000 5066.379883 3270.560059 7933.169922 4173.709961 1908.910034 3867.459961
|
||||||
2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
|
257889.000000 223656.000000 4201.660156 4473.640137 7688.339844 4161.580078 687.578979 3653.689941
|
||||||
2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
|
254270.000000 223151.000000 5715.140137 2752.139893 9273.320312 3772.949951 896.403992 3256.060059
|
||||||
2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
|
258257.000000 224217.000000 6114.310059 1856.859985 9604.320312 4200.490234 1764.380005 2939.219971
|
||||||
2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
|
260020.000000 226868.000000 4237.529785 3605.879883 8066.220215 5430.250000 2138.580078 2696.709961
|
||||||
2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
|
255083.000000 225924.000000 3350.310059 4853.069824 7045.819824 5925.200195 1893.609985 2897.340088
|
||||||
2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
|
254453.000000 222127.000000 5271.330078 2491.500000 8436.679688 5032.080078 2436.050049 3724.590088
|
||||||
2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
|
262588.000000 219950.000000 5994.620117 789.273987 9029.650391 3515.739990 1953.569946 4014.520020
|
||||||
2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
|
265610.000000 223333.000000 4391.410156 2400.959961 8146.459961 3536.959961 530.231995 3133.919922
|
||||||
2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
|
257470.000000 226977.000000 2975.320068 4633.529785 7278.560059 4640.100098 -50.150200 2024.959961
|
||||||
2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
|
250687.000000 226331.000000 4517.859863 3183.800049 8072.600098 5281.660156 1605.140015 2335.139893
|
||||||
2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
|
255563.000000 224495.000000 5551.000000 1101.300049 8461.490234 4725.700195 2726.669922 3480.540039
|
||||||
2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
|
261335.000000 224645.000000 4764.680176 1557.020020 7833.350098 3524.810059 1577.410034 4038.620117
|
||||||
2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
|
260269.000000 224008.000000 3558.030029 2987.610107 7362.439941 3279.229980 562.442017 3786.550049
|
||||||
2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
|
257435.000000 221777.000000 4972.600098 2166.879883 8481.440430 3328.719971 1037.130005 3271.370117
|
||||||
2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
|
261046.000000 221550.000000 5816.180176 590.216980 9120.929688 3895.399902 2382.669922 2824.169922
|
||||||
2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
|
262766.000000 224473.000000 4835.049805 1785.770020 7880.759766 4745.620117 2443.659912 3229.550049
|
||||||
2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
|
256509.000000 226413.000000 3758.870117 3461.199951 6743.770020 4928.959961 1536.619995 3546.689941
|
||||||
2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
|
250793.000000 224372.000000 5218.490234 2865.260010 7803.959961 4351.089844 1333.819946 3680.489990
|
||||||
2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
|
256319.000000 222066.000000 6403.970215 732.344971 9627.759766 3089.300049 1516.780029 3653.689941
|
||||||
2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
|
263343.000000 223235.000000 5200.430176 1388.579956 9372.849609 3371.229980 1450.390015 2678.909912
|
||||||
2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
|
260903.000000 225110.000000 3722.580078 3246.659912 7876.540039 4716.810059 1498.439941 2116.520020
|
||||||
2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
|
254416.000000 223769.000000 4841.649902 2956.399902 8115.919922 5392.359863 2142.810059 2652.320068
|
||||||
2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
|
256698.000000 222172.000000 6471.229980 970.395996 8834.980469 4816.839844 2376.629883 3605.860107
|
||||||
2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
|
261841.000000 223537.000000 5500.740234 1189.660034 8365.730469 4016.469971 1042.270020 3821.199951
|
||||||
2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
|
259503.000000 225840.000000 3827.929932 3088.840088 7676.140137 3978.310059 -357.006989 3016.419922
|
||||||
2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
|
253457.000000 224636.000000 4914.609863 3097.449951 8224.900391 4321.439941 171.373993 2412.360107
|
||||||
2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
|
256029.000000 222221.000000 6841.799805 1028.500000 9252.299805 4387.569824 2418.139893 2510.100098
|
||||||
2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
|
262840.000000 222550.000000 6210.250000 1410.729980 8538.900391 4152.580078 3009.300049 3219.760010
|
||||||
2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
|
261633.000000 225065.000000 4284.529785 3357.209961 7282.169922 3823.590088 1402.839966 3644.669922
|
||||||
2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
|
254591.000000 225109.000000 4693.160156 3647.739990 7745.160156 3686.379883 490.161011 3448.860107
|
||||||
2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
|
254780.000000 223599.000000 6527.379883 1569.869995 9438.429688 3456.580078 1162.520020 3252.010010
|
||||||
2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
|
260639.000000 224107.000000 6531.049805 1633.050049 9283.719727 4174.020020 2089.550049 2775.750000
|
||||||
2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
|
261108.000000 225472.000000 4968.259766 3527.850098 7692.870117 5137.100098 2207.389893 2436.659912
|
||||||
2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
|
255775.000000 223708.000000 4963.450195 4017.370117 7701.419922 5269.649902 2284.399902 2842.080078
|
||||||
2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
|
257398.000000 220947.000000 6767.500000 1645.709961 9107.070312 4000.179932 2548.860107 3624.770020
|
||||||
2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
|
264924.000000 221559.000000 6471.459961 1110.329956 9459.650391 3108.169922 1696.969971 3893.439941
|
||||||
2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
|
265339.000000 225733.000000 4348.799805 3459.510010 8475.299805 4031.239990 573.346985 2910.270020
|
||||||
2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
|
256814.000000 226995.000000 3479.540039 4949.790039 7499.910156 5624.709961 751.656006 2347.709961
|
||||||
2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
|
253316.000000 225161.000000 5147.060059 3218.429932 8460.160156 5869.299805 2336.320068 2987.959961
|
||||||
2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
|
259360.000000 223101.000000 5549.120117 1869.949951 8740.759766 4668.939941 2457.909912 3758.820068
|
||||||
2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
|
262012.000000 224016.000000 4173.609863 3004.129883 8157.040039 3704.729980 987.963989 3652.750000
|
||||||
2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
|
257176.000000 224420.000000 3517.300049 4118.750000 7822.240234 3718.229980 37.264900 2953.679932
|
||||||
2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
|
255146.000000 223322.000000 4923.979980 2330.679932 9095.910156 3792.399902 1013.070007 2711.239990
|
||||||
2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
|
260524.000000 223651.000000 5413.629883 1146.209961 8817.169922 4419.649902 2446.649902 2832.050049
|
||||||
2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
|
262098.000000 225752.000000 4262.979980 2270.969971 7135.479980 5067.120117 2294.679932 3376.620117
|
||||||
2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
|
256889.000000 225379.000000 3606.459961 3568.189941 6552.649902 4970.270020 1516.380005 3662.570068
|
||||||
2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
|
253948.000000 222631.000000 5511.700195 2066.300049 7952.660156 4019.909912 1513.140015 3752.629883
|
||||||
2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
|
259799.000000 222067.000000 5873.500000 608.583984 9253.780273 2870.739990 1348.239990 3344.199951
|
||||||
2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
|
262547.000000 224901.000000 4346.080078 1928.099976 8590.969727 3455.459961 904.390991 2379.270020
|
||||||
2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
|
256137.000000 226761.000000 3423.560059 3379.080078 7471.149902 4894.169922 1153.540039 2031.410034
|
||||||
2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
|
250326.000000 225013.000000 5519.979980 2423.969971 7991.759766 5117.950195 2098.790039 3099.239990
|
||||||
2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
|
255454.000000 222992.000000 6547.950195 496.496002 8751.339844 3900.560059 2132.290039 4076.810059
|
||||||
2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
|
261286.000000 223489.000000 5152.850098 1501.510010 8425.610352 2888.030029 776.114014 3786.360107
|
||||||
2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
|
258969.000000 224069.000000 3832.610107 3001.979980 7979.259766 3182.310059 52.716000 2874.800049
|
||||||
2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
|
254946.000000 222035.000000 5317.879883 2139.800049 9103.139648 3955.610107 1235.170044 2394.149902
|
||||||
2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
|
258676.000000 221205.000000 6594.910156 505.343994 9423.360352 4562.470215 2913.739990 2892.350098
|
||||||
2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
|
262125.000000 223566.000000 5116.750000 1773.599976 8082.200195 4776.370117 2386.389893 3659.729980
|
||||||
2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
|
257835.000000 225918.000000 3714.300049 3477.080078 7205.370117 4554.609863 711.539001 3878.419922
|
||||||
2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
|
253660.000000 224371.000000 5022.450195 2592.429932 8277.200195 4119.370117 486.507996 3666.739990
|
||||||
2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
|
259503.000000 222061.000000 6589.950195 659.935974 9596.919922 3598.100098 1702.489990 3036.600098
|
||||||
2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
|
265495.000000 222843.000000 5541.850098 1728.430054 8459.959961 4492.000000 2231.969971 2430.620117
|
||||||
2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
|
260929.000000 224996.000000 4000.949951 3745.989990 6983.790039 5430.859863 1855.260010 2533.379883
|
||||||
2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
|
252716.000000 224335.000000 5086.560059 3401.149902 7597.970215 5196.120117 1755.719971 3079.760010
|
||||||
2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
|
254110.000000 223111.000000 6822.189941 1229.079956 9164.339844 3761.229980 1679.390015 3584.879883
|
||||||
2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
|
259969.000000 224693.000000 6183.950195 1538.500000 9222.080078 3139.169922 949.901978 3180.800049
|
||||||
2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
|
259078.000000 226913.000000 4388.890137 3694.820068 8195.019531 3933.000000 426.079987 2388.449951
|
||||||
2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
|
254563.000000 224760.000000 5168.439941 4020.939941 8450.269531 4758.910156 1458.900024 2286.429932
|
||||||
2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
|
258059.000000 221217.000000 6883.459961 1649.530029 9232.780273 4457.649902 3057.820068 3031.949951
|
||||||
2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
|
264667.000000 221177.000000 6218.509766 1645.729980 8657.179688 3663.500000 2528.280029 3978.340088
|
||||||
2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
|
262925.000000 224382.000000 4627.500000 3635.929932 7892.799805 3431.320068 604.508972 3901.370117
|
||||||
2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
|
254708.000000 225448.000000 4408.250000 4461.040039 8197.169922 3953.750000 -44.534599 3154.870117
|
||||||
2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
|
253702.000000 224635.000000 5825.770020 2577.050049 9590.049805 4569.250000 1460.270020 2785.169922
|
||||||
2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
|
260206.000000 224140.000000 5387.979980 1951.160034 8789.509766 5131.660156 2706.379883 2972.479980
|
||||||
2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
|
261240.000000 224737.000000 3860.810059 3418.310059 7414.529785 5284.520020 2271.379883 3183.149902
|
||||||
2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
|
256140.000000 223252.000000 3850.010010 3957.139893 7262.649902 4964.640137 1499.510010 3453.129883
|
||||||
2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03
|
256116.000000 221349.000000 5594.479980 2054.399902 8835.129883 3662.010010 1485.510010 3613.010010
|
||||||
|
|
|
@ -1,120 +1,120 @@
|
||||||
2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
|
251774.000000 224241.000000 5688.100098 1915.530029 9329.219727 4183.709961 1212.349976 2641.790039
|
||||||
2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
|
259567.000000 222698.000000 6207.600098 678.671997 9380.230469 4575.580078 2830.610107 2688.629883
|
||||||
2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
|
263073.000000 223304.000000 4961.640137 2197.120117 7687.310059 4861.859863 2732.780029 3008.540039
|
||||||
2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
|
257614.000000 223323.000000 5003.660156 3525.139893 7165.310059 4685.620117 1715.380005 3440.479980
|
||||||
2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
|
255780.000000 221915.000000 6357.310059 2145.290039 8426.969727 3775.350098 1475.390015 3797.239990
|
||||||
2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
|
260166.000000 223008.000000 6702.589844 1484.959961 9288.099609 3330.830078 1228.500000 3214.320068
|
||||||
2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
|
261231.000000 226426.000000 4980.060059 2982.379883 8499.629883 4267.669922 994.088989 2292.889893
|
||||||
2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
|
255117.000000 226642.000000 4584.410156 4656.439941 7860.149902 5317.310059 1473.599976 2111.689941
|
||||||
2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
|
253300.000000 223554.000000 6455.089844 3036.649902 8869.750000 4986.310059 2607.360107 2839.590088
|
||||||
2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
|
261061.000000 221263.000000 6951.979980 1500.239990 9386.099609 3791.679932 2677.010010 3980.629883
|
||||||
2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
|
266503.000000 223198.000000 5189.609863 2594.560059 8571.530273 3175.000000 919.840027 3792.010010
|
||||||
2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
|
260692.000000 225184.000000 3782.479980 4642.879883 7662.959961 3917.790039 -251.097000 2907.060059
|
||||||
2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
|
253963.000000 225081.000000 5123.529785 3839.550049 8669.030273 4877.819824 943.723999 2527.449951
|
||||||
2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
|
256555.000000 224169.000000 5930.600098 2298.540039 8906.709961 5331.680176 2549.909912 3053.560059
|
||||||
2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
|
260889.000000 225010.000000 4681.129883 2971.870117 7900.040039 4874.080078 2322.429932 3649.120117
|
||||||
2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
|
257944.000000 224923.000000 3291.139893 4357.089844 7131.589844 4385.560059 1077.050049 3664.040039
|
||||||
2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
|
255009.000000 223018.000000 4584.819824 2864.000000 8469.490234 3625.580078 985.557007 3504.229980
|
||||||
2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
|
260114.000000 221947.000000 5676.189941 1210.339966 9393.780273 3390.239990 1654.020020 3018.699951
|
||||||
2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
|
264277.000000 224438.000000 4446.620117 2176.719971 8142.089844 4584.879883 2327.830078 2615.800049
|
||||||
2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
|
259221.000000 226471.000000 2734.439941 4182.759766 6389.549805 5540.520020 1958.880005 2720.120117
|
||||||
2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
|
252650.000000 224831.000000 4163.640137 2989.989990 7179.200195 5213.060059 1929.550049 3457.659912
|
||||||
2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
|
257083.000000 222048.000000 5759.040039 702.440979 8566.549805 3552.020020 1832.939941 3956.189941
|
||||||
2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
|
263130.000000 222967.000000 5141.140137 1166.119995 8666.959961 2720.370117 971.374023 3479.729980
|
||||||
2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
|
260236.000000 225265.000000 3425.139893 3339.080078 7853.609863 3674.949951 525.908020 2443.310059
|
||||||
2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
|
253503.000000 224527.000000 4398.129883 2927.429932 8110.279785 4842.470215 1513.869995 2467.100098
|
||||||
2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
|
256126.000000 222693.000000 6043.529785 656.223999 8797.559570 4832.410156 2832.370117 3426.139893
|
||||||
2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
|
261677.000000 223608.000000 5830.459961 1033.910034 8123.939941 3980.689941 1927.959961 4092.719971
|
||||||
2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
|
259457.000000 225536.000000 4015.570068 2995.989990 7135.439941 3713.550049 307.220001 3849.429932
|
||||||
2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
|
253352.000000 224216.000000 4650.560059 3196.620117 8131.279785 3586.159912 70.832298 3074.179932
|
||||||
2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
|
256124.000000 221513.000000 6100.479980 821.979980 9757.540039 3474.510010 1647.520020 2559.860107
|
||||||
2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
|
263024.000000 221559.000000 5789.959961 699.416992 9129.740234 4153.080078 2829.250000 2677.270020
|
||||||
2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
|
261720.000000 224015.000000 4358.500000 2645.360107 7414.109863 4810.669922 2225.989990 3185.989990
|
||||||
2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
|
254756.000000 224240.000000 4857.379883 3229.679932 7539.310059 4769.140137 1507.130005 3668.260010
|
||||||
2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
|
256889.000000 222658.000000 6473.419922 1214.109985 9010.759766 3848.729980 1303.839966 3778.500000
|
||||||
2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
|
264208.000000 223316.000000 5700.450195 1116.560059 9087.610352 3846.679932 1293.589966 2891.560059
|
||||||
2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
|
263310.000000 225719.000000 3936.120117 3252.360107 7552.850098 4897.859863 1156.630005 2037.160034
|
||||||
2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
|
255079.000000 225086.000000 4536.450195 3960.110107 7454.589844 5479.069824 1596.359985 2190.800049
|
||||||
2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
|
254487.000000 222508.000000 6635.859863 1758.849976 8732.969727 4466.970215 2650.360107 3139.310059
|
||||||
2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
|
261241.000000 222432.000000 6702.270020 1085.130005 8989.230469 3112.989990 1933.560059 3828.409912
|
||||||
2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
|
262119.000000 225587.000000 4714.950195 2892.360107 8107.819824 2961.310059 239.977997 3273.719971
|
||||||
2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
|
254999.000000 226514.000000 4532.089844 4126.899902 8200.129883 3872.590088 56.089001 2370.580078
|
||||||
2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
|
254289.000000 224033.000000 6538.810059 2251.439941 9419.429688 4564.450195 2077.810059 2508.169922
|
||||||
2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
|
261890.000000 221960.000000 6846.089844 1475.270020 9125.589844 4598.290039 3299.219971 3475.419922
|
||||||
2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
|
264502.000000 223085.000000 5066.379883 3270.560059 7933.169922 4173.709961 1908.910034 3867.459961
|
||||||
2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
|
257889.000000 223656.000000 4201.660156 4473.640137 7688.339844 4161.580078 687.578979 3653.689941
|
||||||
2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
|
254270.000000 223151.000000 5715.140137 2752.139893 9273.320312 3772.949951 896.403992 3256.060059
|
||||||
2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
|
258257.000000 224217.000000 6114.310059 1856.859985 9604.320312 4200.490234 1764.380005 2939.219971
|
||||||
2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
|
260020.000000 226868.000000 4237.529785 3605.879883 8066.220215 5430.250000 2138.580078 2696.709961
|
||||||
2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
|
255083.000000 225924.000000 3350.310059 4853.069824 7045.819824 5925.200195 1893.609985 2897.340088
|
||||||
2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
|
254453.000000 222127.000000 5271.330078 2491.500000 8436.679688 5032.080078 2436.050049 3724.590088
|
||||||
2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
|
262588.000000 219950.000000 5994.620117 789.273987 9029.650391 3515.739990 1953.569946 4014.520020
|
||||||
2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
|
265610.000000 223333.000000 4391.410156 2400.959961 8146.459961 3536.959961 530.231995 3133.919922
|
||||||
2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
|
257470.000000 226977.000000 2975.320068 4633.529785 7278.560059 4640.100098 -50.150200 2024.959961
|
||||||
2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
|
250687.000000 226331.000000 4517.859863 3183.800049 8072.600098 5281.660156 1605.140015 2335.139893
|
||||||
2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
|
255563.000000 224495.000000 5551.000000 1101.300049 8461.490234 4725.700195 2726.669922 3480.540039
|
||||||
2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
|
261335.000000 224645.000000 4764.680176 1557.020020 7833.350098 3524.810059 1577.410034 4038.620117
|
||||||
2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
|
260269.000000 224008.000000 3558.030029 2987.610107 7362.439941 3279.229980 562.442017 3786.550049
|
||||||
2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
|
257435.000000 221777.000000 4972.600098 2166.879883 8481.440430 3328.719971 1037.130005 3271.370117
|
||||||
2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
|
261046.000000 221550.000000 5816.180176 590.216980 9120.929688 3895.399902 2382.669922 2824.169922
|
||||||
2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
|
262766.000000 224473.000000 4835.049805 1785.770020 7880.759766 4745.620117 2443.659912 3229.550049
|
||||||
2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
|
256509.000000 226413.000000 3758.870117 3461.199951 6743.770020 4928.959961 1536.619995 3546.689941
|
||||||
2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
|
250793.000000 224372.000000 5218.490234 2865.260010 7803.959961 4351.089844 1333.819946 3680.489990
|
||||||
2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
|
256319.000000 222066.000000 6403.970215 732.344971 9627.759766 3089.300049 1516.780029 3653.689941
|
||||||
2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
|
263343.000000 223235.000000 5200.430176 1388.579956 9372.849609 3371.229980 1450.390015 2678.909912
|
||||||
2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
|
260903.000000 225110.000000 3722.580078 3246.659912 7876.540039 4716.810059 1498.439941 2116.520020
|
||||||
2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
|
254416.000000 223769.000000 4841.649902 2956.399902 8115.919922 5392.359863 2142.810059 2652.320068
|
||||||
2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
|
256698.000000 222172.000000 6471.229980 970.395996 8834.980469 4816.839844 2376.629883 3605.860107
|
||||||
2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
|
261841.000000 223537.000000 5500.740234 1189.660034 8365.730469 4016.469971 1042.270020 3821.199951
|
||||||
2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
|
259503.000000 225840.000000 3827.929932 3088.840088 7676.140137 3978.310059 -357.006989 3016.419922
|
||||||
2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
|
253457.000000 224636.000000 4914.609863 3097.449951 8224.900391 4321.439941 171.373993 2412.360107
|
||||||
2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
|
256029.000000 222221.000000 6841.799805 1028.500000 9252.299805 4387.569824 2418.139893 2510.100098
|
||||||
2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
|
262840.000000 222550.000000 6210.250000 1410.729980 8538.900391 4152.580078 3009.300049 3219.760010
|
||||||
2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
|
261633.000000 225065.000000 4284.529785 3357.209961 7282.169922 3823.590088 1402.839966 3644.669922
|
||||||
2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
|
254591.000000 225109.000000 4693.160156 3647.739990 7745.160156 3686.379883 490.161011 3448.860107
|
||||||
2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
|
254780.000000 223599.000000 6527.379883 1569.869995 9438.429688 3456.580078 1162.520020 3252.010010
|
||||||
2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
|
260639.000000 224107.000000 6531.049805 1633.050049 9283.719727 4174.020020 2089.550049 2775.750000
|
||||||
2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
|
261108.000000 225472.000000 4968.259766 3527.850098 7692.870117 5137.100098 2207.389893 2436.659912
|
||||||
2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
|
255775.000000 223708.000000 4963.450195 4017.370117 7701.419922 5269.649902 2284.399902 2842.080078
|
||||||
2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
|
257398.000000 220947.000000 6767.500000 1645.709961 9107.070312 4000.179932 2548.860107 3624.770020
|
||||||
2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
|
264924.000000 221559.000000 6471.459961 1110.329956 9459.650391 3108.169922 1696.969971 3893.439941
|
||||||
2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
|
265339.000000 225733.000000 4348.799805 3459.510010 8475.299805 4031.239990 573.346985 2910.270020
|
||||||
2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
|
256814.000000 226995.000000 3479.540039 4949.790039 7499.910156 5624.709961 751.656006 2347.709961
|
||||||
2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
|
253316.000000 225161.000000 5147.060059 3218.429932 8460.160156 5869.299805 2336.320068 2987.959961
|
||||||
2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
|
259360.000000 223101.000000 5549.120117 1869.949951 8740.759766 4668.939941 2457.909912 3758.820068
|
||||||
2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
|
262012.000000 224016.000000 4173.609863 3004.129883 8157.040039 3704.729980 987.963989 3652.750000
|
||||||
2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
|
257176.000000 224420.000000 3517.300049 4118.750000 7822.240234 3718.229980 37.264900 2953.679932
|
||||||
2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
|
255146.000000 223322.000000 4923.979980 2330.679932 9095.910156 3792.399902 1013.070007 2711.239990
|
||||||
2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
|
260524.000000 223651.000000 5413.629883 1146.209961 8817.169922 4419.649902 2446.649902 2832.050049
|
||||||
2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
|
262098.000000 225752.000000 4262.979980 2270.969971 7135.479980 5067.120117 2294.679932 3376.620117
|
||||||
2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
|
256889.000000 225379.000000 3606.459961 3568.189941 6552.649902 4970.270020 1516.380005 3662.570068
|
||||||
2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
|
253948.000000 222631.000000 5511.700195 2066.300049 7952.660156 4019.909912 1513.140015 3752.629883
|
||||||
2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
|
259799.000000 222067.000000 5873.500000 608.583984 9253.780273 2870.739990 1348.239990 3344.199951
|
||||||
2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
|
262547.000000 224901.000000 4346.080078 1928.099976 8590.969727 3455.459961 904.390991 2379.270020
|
||||||
2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
|
256137.000000 226761.000000 3423.560059 3379.080078 7471.149902 4894.169922 1153.540039 2031.410034
|
||||||
2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
|
250326.000000 225013.000000 5519.979980 2423.969971 7991.759766 5117.950195 2098.790039 3099.239990
|
||||||
2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
|
255454.000000 222992.000000 6547.950195 496.496002 8751.339844 3900.560059 2132.290039 4076.810059
|
||||||
2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
|
261286.000000 223489.000000 5152.850098 1501.510010 8425.610352 2888.030029 776.114014 3786.360107
|
||||||
2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
|
258969.000000 224069.000000 3832.610107 3001.979980 7979.259766 3182.310059 52.716000 2874.800049
|
||||||
2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
|
254946.000000 222035.000000 5317.879883 2139.800049 9103.139648 3955.610107 1235.170044 2394.149902
|
||||||
2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
|
258676.000000 221205.000000 6594.910156 505.343994 9423.360352 4562.470215 2913.739990 2892.350098
|
||||||
2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
|
262125.000000 223566.000000 5116.750000 1773.599976 8082.200195 4776.370117 2386.389893 3659.729980
|
||||||
2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
|
257835.000000 225918.000000 3714.300049 3477.080078 7205.370117 4554.609863 711.539001 3878.419922
|
||||||
2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
|
253660.000000 224371.000000 5022.450195 2592.429932 8277.200195 4119.370117 486.507996 3666.739990
|
||||||
2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
|
259503.000000 222061.000000 6589.950195 659.935974 9596.919922 3598.100098 1702.489990 3036.600098
|
||||||
2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
|
265495.000000 222843.000000 5541.850098 1728.430054 8459.959961 4492.000000 2231.969971 2430.620117
|
||||||
2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
|
260929.000000 224996.000000 4000.949951 3745.989990 6983.790039 5430.859863 1855.260010 2533.379883
|
||||||
2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
|
252716.000000 224335.000000 5086.560059 3401.149902 7597.970215 5196.120117 1755.719971 3079.760010
|
||||||
2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
|
254110.000000 223111.000000 6822.189941 1229.079956 9164.339844 3761.229980 1679.390015 3584.879883
|
||||||
2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
|
259969.000000 224693.000000 6183.950195 1538.500000 9222.080078 3139.169922 949.901978 3180.800049
|
||||||
2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
|
259078.000000 226913.000000 4388.890137 3694.820068 8195.019531 3933.000000 426.079987 2388.449951
|
||||||
2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
|
254563.000000 224760.000000 5168.439941 4020.939941 8450.269531 4758.910156 1458.900024 2286.429932
|
||||||
2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
|
258059.000000 221217.000000 6883.459961 1649.530029 9232.780273 4457.649902 3057.820068 3031.949951
|
||||||
2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
|
264667.000000 221177.000000 6218.509766 1645.729980 8657.179688 3663.500000 2528.280029 3978.340088
|
||||||
2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
|
262925.000000 224382.000000 4627.500000 3635.929932 7892.799805 3431.320068 604.508972 3901.370117
|
||||||
2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
|
254708.000000 225448.000000 4408.250000 4461.040039 8197.169922 3953.750000 -44.534599 3154.870117
|
||||||
2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
|
253702.000000 224635.000000 5825.770020 2577.050049 9590.049805 4569.250000 1460.270020 2785.169922
|
||||||
2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
|
260206.000000 224140.000000 5387.979980 1951.160034 8789.509766 5131.660156 2706.379883 2972.479980
|
||||||
2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
|
261240.000000 224737.000000 3860.810059 3418.310059 7414.529785 5284.520020 2271.379883 3183.149902
|
||||||
2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
|
256140.000000 223252.000000 3850.010010 3957.139893 7262.649902 4964.640137 1499.510010 3453.129883
|
||||||
2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03
|
256116.000000 221349.000000 5594.479980 2054.399902 8835.129883 3662.010010 1485.510010 3613.010010
|
||||||
|
|
|
@ -1,124 +1,124 @@
|
||||||
# path: /newton/prep
|
# path: /newton/prep
|
||||||
# layout: float32_8
|
# layout: PrepData
|
||||||
# start: 1332496830000000
|
# start: 1332496830.0
|
||||||
# end: 1332496830999000
|
# end: 1332496830.999
|
||||||
1332496830000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
|
1332496830.000000 251774.000000 224241.000000 5688.100098 1915.530029 9329.219727 4183.709961 1212.349976 2641.790039
|
||||||
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
|
1332496830.008333 259567.000000 222698.000000 6207.600098 678.671997 9380.230469 4575.580078 2830.610107 2688.629883
|
||||||
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
|
1332496830.016667 263073.000000 223304.000000 4961.640137 2197.120117 7687.310059 4861.859863 2732.780029 3008.540039
|
||||||
1332496830025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
|
1332496830.025000 257614.000000 223323.000000 5003.660156 3525.139893 7165.310059 4685.620117 1715.380005 3440.479980
|
||||||
1332496830033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
|
1332496830.033333 255780.000000 221915.000000 6357.310059 2145.290039 8426.969727 3775.350098 1475.390015 3797.239990
|
||||||
1332496830041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
|
1332496830.041667 260166.000000 223008.000000 6702.589844 1484.959961 9288.099609 3330.830078 1228.500000 3214.320068
|
||||||
1332496830050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
|
1332496830.050000 261231.000000 226426.000000 4980.060059 2982.379883 8499.629883 4267.669922 994.088989 2292.889893
|
||||||
1332496830058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
|
1332496830.058333 255117.000000 226642.000000 4584.410156 4656.439941 7860.149902 5317.310059 1473.599976 2111.689941
|
||||||
1332496830066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
|
1332496830.066667 253300.000000 223554.000000 6455.089844 3036.649902 8869.750000 4986.310059 2607.360107 2839.590088
|
||||||
1332496830075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
|
1332496830.075000 261061.000000 221263.000000 6951.979980 1500.239990 9386.099609 3791.679932 2677.010010 3980.629883
|
||||||
1332496830083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
|
1332496830.083333 266503.000000 223198.000000 5189.609863 2594.560059 8571.530273 3175.000000 919.840027 3792.010010
|
||||||
1332496830091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
|
1332496830.091667 260692.000000 225184.000000 3782.479980 4642.879883 7662.959961 3917.790039 -251.097000 2907.060059
|
||||||
1332496830100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
|
1332496830.100000 253963.000000 225081.000000 5123.529785 3839.550049 8669.030273 4877.819824 943.723999 2527.449951
|
||||||
1332496830108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
|
1332496830.108333 256555.000000 224169.000000 5930.600098 2298.540039 8906.709961 5331.680176 2549.909912 3053.560059
|
||||||
1332496830116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
|
1332496830.116667 260889.000000 225010.000000 4681.129883 2971.870117 7900.040039 4874.080078 2322.429932 3649.120117
|
||||||
1332496830125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
|
1332496830.125000 257944.000000 224923.000000 3291.139893 4357.089844 7131.589844 4385.560059 1077.050049 3664.040039
|
||||||
1332496830133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
|
1332496830.133333 255009.000000 223018.000000 4584.819824 2864.000000 8469.490234 3625.580078 985.557007 3504.229980
|
||||||
1332496830141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
|
1332496830.141667 260114.000000 221947.000000 5676.189941 1210.339966 9393.780273 3390.239990 1654.020020 3018.699951
|
||||||
1332496830150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
|
1332496830.150000 264277.000000 224438.000000 4446.620117 2176.719971 8142.089844 4584.879883 2327.830078 2615.800049
|
||||||
1332496830158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
|
1332496830.158333 259221.000000 226471.000000 2734.439941 4182.759766 6389.549805 5540.520020 1958.880005 2720.120117
|
||||||
1332496830166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
|
1332496830.166667 252650.000000 224831.000000 4163.640137 2989.989990 7179.200195 5213.060059 1929.550049 3457.659912
|
||||||
1332496830175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
|
1332496830.175000 257083.000000 222048.000000 5759.040039 702.440979 8566.549805 3552.020020 1832.939941 3956.189941
|
||||||
1332496830183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
|
1332496830.183333 263130.000000 222967.000000 5141.140137 1166.119995 8666.959961 2720.370117 971.374023 3479.729980
|
||||||
1332496830191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
|
1332496830.191667 260236.000000 225265.000000 3425.139893 3339.080078 7853.609863 3674.949951 525.908020 2443.310059
|
||||||
1332496830200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
|
1332496830.200000 253503.000000 224527.000000 4398.129883 2927.429932 8110.279785 4842.470215 1513.869995 2467.100098
|
||||||
1332496830208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
|
1332496830.208333 256126.000000 222693.000000 6043.529785 656.223999 8797.559570 4832.410156 2832.370117 3426.139893
|
||||||
1332496830216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
|
1332496830.216667 261677.000000 223608.000000 5830.459961 1033.910034 8123.939941 3980.689941 1927.959961 4092.719971
|
||||||
1332496830225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
|
1332496830.225000 259457.000000 225536.000000 4015.570068 2995.989990 7135.439941 3713.550049 307.220001 3849.429932
|
||||||
1332496830233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
|
1332496830.233333 253352.000000 224216.000000 4650.560059 3196.620117 8131.279785 3586.159912 70.832298 3074.179932
|
||||||
1332496830241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
|
1332496830.241667 256124.000000 221513.000000 6100.479980 821.979980 9757.540039 3474.510010 1647.520020 2559.860107
|
||||||
1332496830250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
|
1332496830.250000 263024.000000 221559.000000 5789.959961 699.416992 9129.740234 4153.080078 2829.250000 2677.270020
|
||||||
1332496830258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
|
1332496830.258333 261720.000000 224015.000000 4358.500000 2645.360107 7414.109863 4810.669922 2225.989990 3185.989990
|
||||||
1332496830266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
|
1332496830.266667 254756.000000 224240.000000 4857.379883 3229.679932 7539.310059 4769.140137 1507.130005 3668.260010
|
||||||
1332496830275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
|
1332496830.275000 256889.000000 222658.000000 6473.419922 1214.109985 9010.759766 3848.729980 1303.839966 3778.500000
|
||||||
1332496830283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
|
1332496830.283333 264208.000000 223316.000000 5700.450195 1116.560059 9087.610352 3846.679932 1293.589966 2891.560059
|
||||||
1332496830291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
|
1332496830.291667 263310.000000 225719.000000 3936.120117 3252.360107 7552.850098 4897.859863 1156.630005 2037.160034
|
||||||
1332496830300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
|
1332496830.300000 255079.000000 225086.000000 4536.450195 3960.110107 7454.589844 5479.069824 1596.359985 2190.800049
|
||||||
1332496830308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
|
1332496830.308333 254487.000000 222508.000000 6635.859863 1758.849976 8732.969727 4466.970215 2650.360107 3139.310059
|
||||||
1332496830316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
|
1332496830.316667 261241.000000 222432.000000 6702.270020 1085.130005 8989.230469 3112.989990 1933.560059 3828.409912
|
||||||
1332496830325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
|
1332496830.325000 262119.000000 225587.000000 4714.950195 2892.360107 8107.819824 2961.310059 239.977997 3273.719971
|
||||||
1332496830333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
|
1332496830.333333 254999.000000 226514.000000 4532.089844 4126.899902 8200.129883 3872.590088 56.089001 2370.580078
|
||||||
1332496830341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
|
1332496830.341667 254289.000000 224033.000000 6538.810059 2251.439941 9419.429688 4564.450195 2077.810059 2508.169922
|
||||||
1332496830350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
|
1332496830.350000 261890.000000 221960.000000 6846.089844 1475.270020 9125.589844 4598.290039 3299.219971 3475.419922
|
||||||
1332496830358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
|
1332496830.358333 264502.000000 223085.000000 5066.379883 3270.560059 7933.169922 4173.709961 1908.910034 3867.459961
|
||||||
1332496830366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
|
1332496830.366667 257889.000000 223656.000000 4201.660156 4473.640137 7688.339844 4161.580078 687.578979 3653.689941
|
||||||
1332496830375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
|
1332496830.375000 254270.000000 223151.000000 5715.140137 2752.139893 9273.320312 3772.949951 896.403992 3256.060059
|
||||||
1332496830383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
|
1332496830.383333 258257.000000 224217.000000 6114.310059 1856.859985 9604.320312 4200.490234 1764.380005 2939.219971
|
||||||
1332496830391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
|
1332496830.391667 260020.000000 226868.000000 4237.529785 3605.879883 8066.220215 5430.250000 2138.580078 2696.709961
|
||||||
1332496830400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
|
1332496830.400000 255083.000000 225924.000000 3350.310059 4853.069824 7045.819824 5925.200195 1893.609985 2897.340088
|
||||||
1332496830408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
|
1332496830.408333 254453.000000 222127.000000 5271.330078 2491.500000 8436.679688 5032.080078 2436.050049 3724.590088
|
||||||
1332496830416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
|
1332496830.416667 262588.000000 219950.000000 5994.620117 789.273987 9029.650391 3515.739990 1953.569946 4014.520020
|
||||||
1332496830425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
|
1332496830.425000 265610.000000 223333.000000 4391.410156 2400.959961 8146.459961 3536.959961 530.231995 3133.919922
|
||||||
1332496830433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
|
1332496830.433333 257470.000000 226977.000000 2975.320068 4633.529785 7278.560059 4640.100098 -50.150200 2024.959961
|
||||||
1332496830441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
|
1332496830.441667 250687.000000 226331.000000 4517.859863 3183.800049 8072.600098 5281.660156 1605.140015 2335.139893
|
||||||
1332496830450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
|
1332496830.450000 255563.000000 224495.000000 5551.000000 1101.300049 8461.490234 4725.700195 2726.669922 3480.540039
|
||||||
1332496830458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
|
1332496830.458333 261335.000000 224645.000000 4764.680176 1557.020020 7833.350098 3524.810059 1577.410034 4038.620117
|
||||||
1332496830466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
|
1332496830.466667 260269.000000 224008.000000 3558.030029 2987.610107 7362.439941 3279.229980 562.442017 3786.550049
|
||||||
1332496830475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
|
1332496830.475000 257435.000000 221777.000000 4972.600098 2166.879883 8481.440430 3328.719971 1037.130005 3271.370117
|
||||||
1332496830483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
|
1332496830.483333 261046.000000 221550.000000 5816.180176 590.216980 9120.929688 3895.399902 2382.669922 2824.169922
|
||||||
1332496830491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
|
1332496830.491667 262766.000000 224473.000000 4835.049805 1785.770020 7880.759766 4745.620117 2443.659912 3229.550049
|
||||||
1332496830500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
|
1332496830.500000 256509.000000 226413.000000 3758.870117 3461.199951 6743.770020 4928.959961 1536.619995 3546.689941
|
||||||
1332496830508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
|
1332496830.508333 250793.000000 224372.000000 5218.490234 2865.260010 7803.959961 4351.089844 1333.819946 3680.489990
|
||||||
1332496830516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
|
1332496830.516667 256319.000000 222066.000000 6403.970215 732.344971 9627.759766 3089.300049 1516.780029 3653.689941
|
||||||
1332496830525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
|
1332496830.525000 263343.000000 223235.000000 5200.430176 1388.579956 9372.849609 3371.229980 1450.390015 2678.909912
|
||||||
1332496830533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
|
1332496830.533333 260903.000000 225110.000000 3722.580078 3246.659912 7876.540039 4716.810059 1498.439941 2116.520020
|
||||||
1332496830541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
|
1332496830.541667 254416.000000 223769.000000 4841.649902 2956.399902 8115.919922 5392.359863 2142.810059 2652.320068
|
||||||
1332496830550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
|
1332496830.550000 256698.000000 222172.000000 6471.229980 970.395996 8834.980469 4816.839844 2376.629883 3605.860107
|
||||||
1332496830558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
|
1332496830.558333 261841.000000 223537.000000 5500.740234 1189.660034 8365.730469 4016.469971 1042.270020 3821.199951
|
||||||
1332496830566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
|
1332496830.566667 259503.000000 225840.000000 3827.929932 3088.840088 7676.140137 3978.310059 -357.006989 3016.419922
|
||||||
1332496830575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
|
1332496830.575000 253457.000000 224636.000000 4914.609863 3097.449951 8224.900391 4321.439941 171.373993 2412.360107
|
||||||
1332496830583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
|
1332496830.583333 256029.000000 222221.000000 6841.799805 1028.500000 9252.299805 4387.569824 2418.139893 2510.100098
|
||||||
1332496830591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
|
1332496830.591667 262840.000000 222550.000000 6210.250000 1410.729980 8538.900391 4152.580078 3009.300049 3219.760010
|
||||||
1332496830600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
|
1332496830.600000 261633.000000 225065.000000 4284.529785 3357.209961 7282.169922 3823.590088 1402.839966 3644.669922
|
||||||
1332496830608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
|
1332496830.608333 254591.000000 225109.000000 4693.160156 3647.739990 7745.160156 3686.379883 490.161011 3448.860107
|
||||||
1332496830616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
|
1332496830.616667 254780.000000 223599.000000 6527.379883 1569.869995 9438.429688 3456.580078 1162.520020 3252.010010
|
||||||
1332496830625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
|
1332496830.625000 260639.000000 224107.000000 6531.049805 1633.050049 9283.719727 4174.020020 2089.550049 2775.750000
|
||||||
1332496830633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
|
1332496830.633333 261108.000000 225472.000000 4968.259766 3527.850098 7692.870117 5137.100098 2207.389893 2436.659912
|
||||||
1332496830641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
|
1332496830.641667 255775.000000 223708.000000 4963.450195 4017.370117 7701.419922 5269.649902 2284.399902 2842.080078
|
||||||
1332496830650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
|
1332496830.650000 257398.000000 220947.000000 6767.500000 1645.709961 9107.070312 4000.179932 2548.860107 3624.770020
|
||||||
1332496830658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
|
1332496830.658333 264924.000000 221559.000000 6471.459961 1110.329956 9459.650391 3108.169922 1696.969971 3893.439941
|
||||||
1332496830666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
|
1332496830.666667 265339.000000 225733.000000 4348.799805 3459.510010 8475.299805 4031.239990 573.346985 2910.270020
|
||||||
1332496830675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
|
1332496830.675000 256814.000000 226995.000000 3479.540039 4949.790039 7499.910156 5624.709961 751.656006 2347.709961
|
||||||
1332496830683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
|
1332496830.683333 253316.000000 225161.000000 5147.060059 3218.429932 8460.160156 5869.299805 2336.320068 2987.959961
|
||||||
1332496830691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
|
1332496830.691667 259360.000000 223101.000000 5549.120117 1869.949951 8740.759766 4668.939941 2457.909912 3758.820068
|
||||||
1332496830700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
|
1332496830.700000 262012.000000 224016.000000 4173.609863 3004.129883 8157.040039 3704.729980 987.963989 3652.750000
|
||||||
1332496830708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
|
1332496830.708333 257176.000000 224420.000000 3517.300049 4118.750000 7822.240234 3718.229980 37.264900 2953.679932
|
||||||
1332496830716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
|
1332496830.716667 255146.000000 223322.000000 4923.979980 2330.679932 9095.910156 3792.399902 1013.070007 2711.239990
|
||||||
1332496830725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
|
1332496830.725000 260524.000000 223651.000000 5413.629883 1146.209961 8817.169922 4419.649902 2446.649902 2832.050049
|
||||||
1332496830733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
|
1332496830.733333 262098.000000 225752.000000 4262.979980 2270.969971 7135.479980 5067.120117 2294.679932 3376.620117
|
||||||
1332496830741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
|
1332496830.741667 256889.000000 225379.000000 3606.459961 3568.189941 6552.649902 4970.270020 1516.380005 3662.570068
|
||||||
1332496830750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
|
1332496830.750000 253948.000000 222631.000000 5511.700195 2066.300049 7952.660156 4019.909912 1513.140015 3752.629883
|
||||||
1332496830758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
|
1332496830.758333 259799.000000 222067.000000 5873.500000 608.583984 9253.780273 2870.739990 1348.239990 3344.199951
|
||||||
1332496830766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
|
1332496830.766667 262547.000000 224901.000000 4346.080078 1928.099976 8590.969727 3455.459961 904.390991 2379.270020
|
||||||
1332496830775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
|
1332496830.775000 256137.000000 226761.000000 3423.560059 3379.080078 7471.149902 4894.169922 1153.540039 2031.410034
|
||||||
1332496830783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
|
1332496830.783333 250326.000000 225013.000000 5519.979980 2423.969971 7991.759766 5117.950195 2098.790039 3099.239990
|
||||||
1332496830791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
|
1332496830.791667 255454.000000 222992.000000 6547.950195 496.496002 8751.339844 3900.560059 2132.290039 4076.810059
|
||||||
1332496830800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
|
1332496830.800000 261286.000000 223489.000000 5152.850098 1501.510010 8425.610352 2888.030029 776.114014 3786.360107
|
||||||
1332496830808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
|
1332496830.808333 258969.000000 224069.000000 3832.610107 3001.979980 7979.259766 3182.310059 52.716000 2874.800049
|
||||||
1332496830816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
|
1332496830.816667 254946.000000 222035.000000 5317.879883 2139.800049 9103.139648 3955.610107 1235.170044 2394.149902
|
||||||
1332496830825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
|
1332496830.825000 258676.000000 221205.000000 6594.910156 505.343994 9423.360352 4562.470215 2913.739990 2892.350098
|
||||||
1332496830833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
|
1332496830.833333 262125.000000 223566.000000 5116.750000 1773.599976 8082.200195 4776.370117 2386.389893 3659.729980
|
||||||
1332496830841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
|
1332496830.841667 257835.000000 225918.000000 3714.300049 3477.080078 7205.370117 4554.609863 711.539001 3878.419922
|
||||||
1332496830850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
|
1332496830.850000 253660.000000 224371.000000 5022.450195 2592.429932 8277.200195 4119.370117 486.507996 3666.739990
|
||||||
1332496830858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
|
1332496830.858333 259503.000000 222061.000000 6589.950195 659.935974 9596.919922 3598.100098 1702.489990 3036.600098
|
||||||
1332496830866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
|
1332496830.866667 265495.000000 222843.000000 5541.850098 1728.430054 8459.959961 4492.000000 2231.969971 2430.620117
|
||||||
1332496830875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
|
1332496830.875000 260929.000000 224996.000000 4000.949951 3745.989990 6983.790039 5430.859863 1855.260010 2533.379883
|
||||||
1332496830883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
|
1332496830.883333 252716.000000 224335.000000 5086.560059 3401.149902 7597.970215 5196.120117 1755.719971 3079.760010
|
||||||
1332496830891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
|
1332496830.891667 254110.000000 223111.000000 6822.189941 1229.079956 9164.339844 3761.229980 1679.390015 3584.879883
|
||||||
1332496830900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
|
1332496830.900000 259969.000000 224693.000000 6183.950195 1538.500000 9222.080078 3139.169922 949.901978 3180.800049
|
||||||
1332496830908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
|
1332496830.908333 259078.000000 226913.000000 4388.890137 3694.820068 8195.019531 3933.000000 426.079987 2388.449951
|
||||||
1332496830916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
|
1332496830.916667 254563.000000 224760.000000 5168.439941 4020.939941 8450.269531 4758.910156 1458.900024 2286.429932
|
||||||
1332496830925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
|
1332496830.925000 258059.000000 221217.000000 6883.459961 1649.530029 9232.780273 4457.649902 3057.820068 3031.949951
|
||||||
1332496830933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
|
1332496830.933333 264667.000000 221177.000000 6218.509766 1645.729980 8657.179688 3663.500000 2528.280029 3978.340088
|
||||||
1332496830941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
|
1332496830.941667 262925.000000 224382.000000 4627.500000 3635.929932 7892.799805 3431.320068 604.508972 3901.370117
|
||||||
1332496830950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
|
1332496830.950000 254708.000000 225448.000000 4408.250000 4461.040039 8197.169922 3953.750000 -44.534599 3154.870117
|
||||||
1332496830958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
|
1332496830.958333 253702.000000 224635.000000 5825.770020 2577.050049 9590.049805 4569.250000 1460.270020 2785.169922
|
||||||
1332496830966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
|
1332496830.966667 260206.000000 224140.000000 5387.979980 1951.160034 8789.509766 5131.660156 2706.379883 2972.479980
|
||||||
1332496830975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
|
1332496830.975000 261240.000000 224737.000000 3860.810059 3418.310059 7414.529785 5284.520020 2271.379883 3183.149902
|
||||||
1332496830983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
|
1332496830.983333 256140.000000 223252.000000 3850.010010 3957.139893 7262.649902 4964.640137 1499.510010 3453.129883
|
||||||
1332496830991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03
|
1332496830.991667 256116.000000 221349.000000 5594.479980 2054.399902 8835.129883 3662.010010 1485.510010 3613.010010
|
||||||
|
|
|
@ -1,28 +0,0 @@
|
||||||
# interval-start 1332496919900000
|
|
||||||
1332496919900000 2.523050e+05 2.254020e+05 4.779410e+03 3.638030e+03 8.138070e+03 4.334460e+03 1.083780e+03 3.743730e+03
|
|
||||||
1332496919908333 2.551190e+05 2.237870e+05 5.965640e+03 2.076350e+03 9.468790e+03 3.693880e+03 1.247860e+03 3.393680e+03
|
|
||||||
1332496919916667 2.616370e+05 2.247980e+05 4.848970e+03 2.315620e+03 9.323300e+03 4.225460e+03 1.805780e+03 2.593050e+03
|
|
||||||
1332496919925000 2.606460e+05 2.251300e+05 3.061360e+03 3.951840e+03 7.662910e+03 5.341410e+03 1.986520e+03 2.276780e+03
|
|
||||||
1332496919933333 2.559710e+05 2.235030e+05 4.096030e+03 3.296970e+03 7.827080e+03 5.452120e+03 2.492520e+03 2.929450e+03
|
|
||||||
1332496919941667 2.579260e+05 2.217080e+05 5.472320e+03 1.555700e+03 8.495760e+03 4.491140e+03 2.379780e+03 3.741710e+03
|
|
||||||
1332496919950000 2.610180e+05 2.242350e+05 4.669770e+03 1.876190e+03 8.366680e+03 3.677510e+03 9.021690e+02 3.549040e+03
|
|
||||||
1332496919958333 2.569150e+05 2.274650e+05 2.785070e+03 3.751930e+03 7.440320e+03 3.964860e+03 -3.227860e+02 2.460890e+03
|
|
||||||
1332496919966667 2.509510e+05 2.262000e+05 3.772710e+03 3.131950e+03 8.159860e+03 4.539860e+03 7.375190e+02 2.126750e+03
|
|
||||||
1332496919975000 2.556710e+05 2.223720e+05 5.826200e+03 8.715560e+02 9.120240e+03 4.545110e+03 2.804310e+03 2.721000e+03
|
|
||||||
1332496919983333 2.649730e+05 2.214860e+05 5.839130e+03 4.659180e+02 8.628300e+03 3.934870e+03 2.972490e+03 3.773730e+03
|
|
||||||
1332496919991667 2.652170e+05 2.233920e+05 3.718770e+03 2.834970e+03 7.209900e+03 3.460260e+03 1.324930e+03 4.075960e+03
|
|
||||||
# interval-end 1332496919991668
|
|
||||||
# interval-start 1332496920000000
|
|
||||||
1332496920000000 2.564370e+05 2.244300e+05 4.011610e+03 3.475340e+03 7.495890e+03 3.388940e+03 2.613970e+02 3.731260e+03
|
|
||||||
1332496920008333 2.539630e+05 2.241670e+05 5.621070e+03 1.548010e+03 9.165170e+03 3.522930e+03 1.058930e+03 2.996960e+03
|
|
||||||
1332496920016667 2.585080e+05 2.249300e+05 6.011400e+03 8.188660e+02 9.039950e+03 4.482440e+03 2.490390e+03 2.679340e+03
|
|
||||||
1332496920025000 2.596270e+05 2.260220e+05 4.474500e+03 2.423020e+03 7.414190e+03 5.071970e+03 2.439380e+03 2.962960e+03
|
|
||||||
1332496920033333 2.551870e+05 2.246320e+05 4.738570e+03 3.398040e+03 7.395120e+03 4.726450e+03 1.839030e+03 3.393530e+03
|
|
||||||
1332496920041667 2.571020e+05 2.216230e+05 6.144130e+03 1.441090e+03 8.756480e+03 3.495320e+03 1.869940e+03 3.752530e+03
|
|
||||||
1332496920050000 2.636530e+05 2.217700e+05 6.221770e+03 7.389620e+02 9.547600e+03 2.666820e+03 1.462660e+03 3.332570e+03
|
|
||||||
1332496920058333 2.636130e+05 2.252560e+05 4.477120e+03 2.437450e+03 8.510210e+03 3.855630e+03 9.594420e+02 2.387180e+03
|
|
||||||
1332496920066667 2.553500e+05 2.262640e+05 4.283720e+03 3.923940e+03 7.912470e+03 5.466520e+03 1.284990e+03 2.093720e+03
|
|
||||||
1332496920075000 2.527270e+05 2.246090e+05 5.851930e+03 2.491980e+03 8.540630e+03 5.623050e+03 2.339780e+03 3.007140e+03
|
|
||||||
1332496920083333 2.584750e+05 2.235780e+05 5.924870e+03 1.394480e+03 8.779620e+03 4.544180e+03 2.132030e+03 3.849760e+03
|
|
||||||
1332496920091667 2.615630e+05 2.246090e+05 4.336140e+03 2.455750e+03 8.055380e+03 3.469110e+03 6.278730e+02 3.664200e+03
|
|
||||||
# interval-end 1332496920100000
|
|
|
@ -1,4 +1,3 @@
|
||||||
# comments are cool? what if they contain →UNICODEâ†<C3A2> or invalid utf-8 like Ã(
|
|
||||||
2.66568e+05 2.24029e+05 5.16140e+03 2.52517e+03 8.35084e+03 3.72470e+03 1.35534e+03 2.03900e+03
|
2.66568e+05 2.24029e+05 5.16140e+03 2.52517e+03 8.35084e+03 3.72470e+03 1.35534e+03 2.03900e+03
|
||||||
2.57914e+05 2.27183e+05 4.30368e+03 4.13080e+03 7.25535e+03 4.89047e+03 1.63859e+03 1.93496e+03
|
2.57914e+05 2.27183e+05 4.30368e+03 4.13080e+03 7.25535e+03 4.89047e+03 1.63859e+03 1.93496e+03
|
||||||
2.51717e+05 2.26047e+05 5.99445e+03 3.49363e+03 8.07250e+03 5.08267e+03 2.26917e+03 2.86231e+03
|
2.51717e+05 2.26047e+05 5.99445e+03 3.49363e+03 8.07250e+03 5.08267e+03 2.26917e+03 2.86231e+03
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
1332497040000000 2.56439e+05 2.24775e+05 2.92897e+03 4.66646e+03 7.58491e+03 3.57351e+03 -4.34171e+02 2.98819e+03
|
1332497040.000000 2.56439e+05 2.24775e+05 2.92897e+03 4.66646e+03 7.58491e+03 3.57351e+03 -4.34171e+02 2.98819e+03
|
||||||
1332497040010000 2.51903e+05 2.23202e+05 4.23696e+03 3.49363e+03 8.53493e+03 4.29416e+03 8.49573e+02 2.38189e+03
|
1332497040.010000 2.51903e+05 2.23202e+05 4.23696e+03 3.49363e+03 8.53493e+03 4.29416e+03 8.49573e+02 2.38189e+03
|
||||||
1332497040020000 2.57625e+05 2.20247e+05 5.47017e+03 1.35872e+03 9.18903e+03 4.56136e+03 2.65599e+03 2.60912e+03
|
1332497040.020000 2.57625e+05 2.20247e+05 5.47017e+03 1.35872e+03 9.18903e+03 4.56136e+03 2.65599e+03 2.60912e+03
|
||||||
1332497040030000 2.63375e+05 2.20706e+05 4.51842e+03 1.80758e+03 8.17208e+03 4.17463e+03 2.57884e+03 3.32848e+03
|
1332497040.030000 2.63375e+05 2.20706e+05 4.51842e+03 1.80758e+03 8.17208e+03 4.17463e+03 2.57884e+03 3.32848e+03
|
||||||
1332497040040000 2.59221e+05 2.22346e+05 2.98879e+03 3.66264e+03 6.87274e+03 3.94223e+03 1.25928e+03 3.51786e+03
|
1332497040.040000 2.59221e+05 2.22346e+05 2.98879e+03 3.66264e+03 6.87274e+03 3.94223e+03 1.25928e+03 3.51786e+03
|
||||||
1332497040050000 2.51918e+05 2.22281e+05 4.22677e+03 2.84764e+03 7.78323e+03 3.81659e+03 8.04944e+02 3.46314e+03
|
1332497040.050000 2.51918e+05 2.22281e+05 4.22677e+03 2.84764e+03 7.78323e+03 3.81659e+03 8.04944e+02 3.46314e+03
|
||||||
1332497040050000 2.54478e+05 2.21701e+05 5.61366e+03 1.02262e+03 9.26581e+03 3.50152e+03 1.29331e+03 3.07271e+03
|
1332497040.050000 2.54478e+05 2.21701e+05 5.61366e+03 1.02262e+03 9.26581e+03 3.50152e+03 1.29331e+03 3.07271e+03
|
||||||
1332497040060000 2.59568e+05 2.22945e+05 4.97190e+03 1.28250e+03 8.62081e+03 4.06316e+03 1.85717e+03 2.61990e+03
|
1332497040.060000 2.59568e+05 2.22945e+05 4.97190e+03 1.28250e+03 8.62081e+03 4.06316e+03 1.85717e+03 2.61990e+03
|
||||||
1332497040070000 2.57269e+05 2.23697e+05 3.60527e+03 3.05749e+03 7.22363e+03 4.90330e+03 1.93736e+03 2.35357e+03
|
1332497040.070000 2.57269e+05 2.23697e+05 3.60527e+03 3.05749e+03 7.22363e+03 4.90330e+03 1.93736e+03 2.35357e+03
|
||||||
1332497040080000 2.52274e+05 2.21438e+05 5.01228e+03 2.86309e+03 7.87115e+03 4.80448e+03 2.18291e+03 2.93397e+03
|
1332497040.080000 2.52274e+05 2.21438e+05 5.01228e+03 2.86309e+03 7.87115e+03 4.80448e+03 2.18291e+03 2.93397e+03
|
||||||
1332497040090000 2.56468e+05 2.19205e+05 6.29804e+03 8.09467e+02 9.12895e+03 3.52055e+03 2.16980e+03 3.88739e+03
|
1332497040.090000 2.56468e+05 2.19205e+05 6.29804e+03 8.09467e+02 9.12895e+03 3.52055e+03 2.16980e+03 3.88739e+03
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,8 +0,0 @@
|
||||||
-10000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
||||||
-100000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
||||||
-100000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
||||||
-1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
||||||
1 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
||||||
1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
||||||
1000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
||||||
1000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
|
|
Binary file not shown.
|
@ -1 +0,0 @@
|
||||||
hi
|
|
|
@ -1 +0,0 @@
|
||||||
hi
|
|
Binary file not shown.
Binary file not shown.
|
@ -1 +0,0 @@
|
||||||
hi
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user