Compare commits: nilmdb-1.3...nilmdb-2.0 (291 commits)
| SHA1 | Author | Date | |
|---|---|---|---|
| 671f87b047 | |||
| 2f2faeeab7 | |||
| 2ed544bd30 | |||
| 6821b2a97b | |||
| b20bb92988 | |||
| 699de7b11f | |||
| ea67e45be9 | |||
| ca440a42bd | |||
| 4ff4b263b4 | |||
| 79e544c733 | |||
| 9acf99ff25 | |||
| 4958a5ab2e | |||
| f2d89e2da5 | |||
| 1952f245c0 | |||
| 7cbc0c11c3 | |||
| 9f2651c35e | |||
| 9126980ed4 | |||
| ea051c85b3 | |||
| d8294469cf | |||
| 96eadb0577 | |||
| fb524c649f | |||
| 19a34a07a4 | |||
| d8df6f515f | |||
| 90ee127c87 | |||
| 0b631b7dea | |||
| f587518adb | |||
| efbb2665fe | |||
| 544413018c | |||
| 322b0ec423 | |||
| f3833d9b20 | |||
| 735c8497af | |||
| 7252e40c2d | |||
| caa5604d81 | |||
| 6624e8dab6 | |||
| d907638858 | |||
| 39e66fe38c | |||
| ba915bb290 | |||
| 3f0b8e50a2 | |||
| f93edc469c | |||
| 087fb39475 | |||
| 8b4acf41d6 | |||
| 32a76ccf3f | |||
| 5f9367bdd3 | |||
| 5848d03507 | |||
| 36dc448f02 | |||
| 2764283f59 | |||
| 2d0c3f7868 | |||
| cadba9fbba | |||
| 2d200a86c9 | |||
| 640c1bc95e | |||
| b574fc86f4 | |||
| 02ee18c410 | |||
| d1e241a213 | |||
| c5c7f638e7 | |||
| a1218fd20b | |||
| c58a933d21 | |||
| 7874e1ebfa | |||
| 79b410a85b | |||
| 6645395924 | |||
| beb3eadd38 | |||
| edf4568e8f | |||
| a962258b2a | |||
| fa011559c1 | |||
| 349eec3942 | |||
| 99500f3a88 | |||
| 54eccb17aa | |||
| cc8ac74a37 | |||
| 3be904d158 | |||
| 5d9fc5500c | |||
| 57751f5b32 | |||
| 1c005518d8 | |||
| 3279f7ef2c | |||
| a2e124f444 | |||
| 6d673bd2be | |||
| 613a3185e3 | |||
| c83ee65cf7 | |||
| 113633459d | |||
| 41abf53085 | |||
| fef3e1d31e | |||
| 02db87eee6 | |||
| ad85c3dd29 | |||
| 0e6ccd687b | |||
| 85d4c419fd | |||
| 159278066c | |||
| b69358a185 | |||
| e82ef60e2e | |||
| 911d9bc284 | |||
| 752a9b36ae | |||
| 97d17de8ad | |||
| 5da7e6558e | |||
| 1928caa1d7 | |||
| 5db034432c | |||
| 55119a3e07 | |||
| a9eff10dbf | |||
| 0f5c1c0db6 | |||
| d17365ca37 | |||
| 8125d9c840 | |||
| ba55ad82f0 | |||
| 45c81d2019 | |||
| 78cfda32e3 | |||
| 3658d3876b | |||
| 022b50950f | |||
| e5efbadc8e | |||
| 74f633c9da | |||
| ab9a327130 | |||
| da72fc9777 | |||
| a01cb4132d | |||
| 7c3da2fe44 | |||
| f0e06dc436 | |||
| ddc0eb4264 | |||
| 0a22db3965 | |||
| 8bb8f068de | |||
| 416902097d | |||
| f5276e9fc8 | |||
| c47f28f93a | |||
| 63b5f99b90 | |||
| 7d7b89b52f | |||
| 8d249273c6 | |||
| abe431c663 | |||
| ccf1f695af | |||
| 06f7390c9e | |||
| 6de77a08f1 | |||
| 8db9771c20 | |||
| 04f815a24b | |||
| 6868f5f126 | |||
| ca0943ec19 | |||
| 68addb4e4a | |||
| 68c33b1f14 | |||
| 8dd8741100 | |||
| 8e6341ae5d | |||
| 422b1e2df2 | |||
| 0f745b3047 | |||
| 71cd7ed9b7 | |||
| a79d6104d5 | |||
| 8e8ec59e30 | |||
| b89b945a0f | |||
| bd7bdb2eb8 | |||
| 840cd2fd13 | |||
| bbd59c8b50 | |||
| 405c110fd7 | |||
| 274adcd856 | |||
| a1850c9c2c | |||
| 6cd28b67b1 | |||
| d6d215d53d | |||
| e02143ddb2 | |||
| e275384d03 | |||
| a6a67ec15c | |||
| fc43107307 | |||
| 90633413bb | |||
| c7c3aff0fb | |||
| e2347c954e | |||
| 222a5c6c53 | |||
| 1ca2c143e5 | |||
| b5df575c79 | |||
| 2768a5ad15 | |||
| a105543c38 | |||
| 309f38d0ed | |||
| 9a27b6ef6a | |||
| 99532cf9e0 | |||
| dfdd0e5c74 | |||
| 9a2699adfc | |||
| 9bbb95b18b | |||
| 6bbed322c5 | |||
| 2317894355 | |||
| 539c92226c | |||
| 77c766d85d | |||
| 49d04db1d6 | |||
| ea838d05ae | |||
| f2a48bdb2a | |||
| 6d14e0b8aa | |||
| b31b9327b9 | |||
| b98ff1331a | |||
| 00e6ba1124 | |||
| 01029230c9 | |||
| ecc4e5ef9d | |||
| 23f31c472b | |||
| a1e2746360 | |||
| 1c40d59a52 | |||
| bfb09a189f | |||
| 416a499866 | |||
| 637d193807 | |||
| b7fa5745ce | |||
| 0104c8edd9 | |||
| cf3b8e787d | |||
| 83d022016c | |||
| 43b740ecaa | |||
| 4ce059b920 | |||
| 99a4228285 | |||
| 230ec72609 | |||
| d36ece3767 | |||
| 231963538e | |||
| b4d6aad6de | |||
| e95142eabf | |||
| d21c3470bc | |||
| 7576883f49 | |||
| cc211542f8 | |||
| 8292dcf70b | |||
| b362fd37f6 | |||
| 41ec13ee17 | |||
| efa9aa9097 | |||
| d9afb48f45 | |||
| d1140e0f16 | |||
| 6091e44561 | |||
| e233ba790f | |||
| f0304b4c00 | |||
| 60594ca58e | |||
| c7f2df4abc | |||
| 5b7409f802 | |||
| 06038062a2 | |||
| ae9fe89759 | |||
| 04def60021 | |||
| 9ce0f69dff | |||
| 90c3be91c4 | |||
| ebccfb3531 | |||
| e006f1d02e | |||
| 5292319802 | |||
| 173121ca87 | |||
| 26bab031bd | |||
| b5fefffa09 | |||
| dccb3e370a | |||
| 95ca55aa7e | |||
| e01813f29d | |||
| 7f41e117a2 | |||
| dd5fc806e5 | |||
| f8ca8d31e6 | |||
| ed89d803f0 | |||
| 3d24092cd2 | |||
| 304bb43d85 | |||
| 59a79a30a5 | |||
| c0d450d39e | |||
| 6f14d609b2 | |||
| 77ef87456f | |||
| 32d6af935c | |||
| 6af3a6fc41 | |||
| f8a06fb3b7 | |||
| e790bb9e8a | |||
| 89be6f5931 | |||
| 4cdef3285d | |||
| bcd82c4d59 | |||
| caf63ab01f | |||
| 2d72891162 | |||
| cda2ac3e77 | |||
| 57d3d60f6a | |||
| d6b5befe76 | |||
| 7429c1788d | |||
| 0ef71c193b | |||
| 4a50dd015e | |||
| 22274550ab | |||
| 4f06d6ae68 | |||
| c54d8041c3 | |||
| 52ae397d7d | |||
| d05b6f6348 | |||
| 049375d30e | |||
| 88eb0123f5 | |||
| a547ddbbba | |||
| 28e72fd53e | |||
| f63107b334 | |||
| 955d7aa871 | |||
| b8d2cf1b78 | |||
| 7c465730de | |||
| aca130272d | |||
| 76e5e9883f | |||
| fb4f4519ff | |||
| 30328714a7 | |||
| 759466de4a | |||
| d3efb829b5 | |||
| 90b96799ac | |||
| 56679ad770 | |||
| b5541722c2 | |||
| aaea105861 | |||
| e6a081d639 | |||
| 1835d03412 | |||
| c7a712d8d8 | |||
| 20d315b4f7 | |||
| a44a5e3135 | |||
| 039b2a0557 | |||
| cd1dfe7dcd | |||
| fb35517dfa | |||
| b9f0b35bbe | |||
| b1b09f8cd0 | |||
| d467df7980 | |||
| 09bc7eb48c | |||
| b77f07a4cd | |||
| 59f0076306 | |||
| 83bc5bc775 | |||
| 6b1dfec828 | |||
| d827f41fa5 | |||
| 7eca587fdf | |||
| a351bc1b10 | |||
| 1d61d61a81 | |||
| 755255030b |

.coveragerc

@@ -1,10 +1,11 @@
 # -*- conf -*-

 [run]
-# branch = True
+branch = True

 [report]
 exclude_lines =
     pragma: no cover
     if 0:
-omit = nilmdb/utils/datetime_tz*,nilmdb/scripts,nilmdb/_version.py
+omit = nilmdb/scripts,nilmdb/_version.py,nilmdb/fsck
+show_missing = True

.gitignore (vendored, 7 changes)

@@ -4,6 +4,7 @@ tests/*testdb/
 db/

 # Compiled / cythonized files
+README.html
 docs/*.html
 build/
 *.pyc
@@ -15,10 +16,8 @@ nilmdb/server/rbtree.c
 # Setup junk
 dist/
 nilmdb.egg-info/
-
-# This gets generated as needed by setup.py
-MANIFEST.in
-MANIFEST
+venv/
+.eggs/

 # Misc
 timeit*out

.pylintrc (deleted, 250 lines)

@@ -1,250 +0,0 @@
-# -*- conf -*-
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=datetime_tz
-
-# Pickle collected data for later comparisons.
-persistent=no
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time.
-#enable=
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once).
-disable=C0111,R0903,R0201,R0914,R0912,W0142,W0703,W0702
-
-
-[REPORTS]
-
-# Set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html
-output-format=parseable
-
-# Include message's id in output
-include-ids=yes
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=yes
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables errors warning, statement which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (RP0004).
-comment=no
-
-
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamically set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, add a predefined set of Zope acquired attributes
-# to generated-members.
-zope=no
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E0201 when accessed. Python regular
-# expressions are accepted.
-generated-members=REQUEST,acl_users,aq_parent
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=80
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching the beginning of the name of dummy variables
-# (i.e. not used).
-dummy-variables-rgx=_|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=apply,input
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__)|version)$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# Regular expression which should only match functions or classes name which do
-# not require a docstring
-no-docstring-rgx=__.*__
-
-
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore
-ignored-argument-names=_.*
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of branch for function / method body
-max-branchs=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-int-import-graph=
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-overgeneral-exceptions=Exception

MANIFEST.in (new file, 29 lines)

@@ -0,0 +1,29 @@
+# Root
+include README.txt
+include setup.cfg
+include setup.py
+include versioneer.py
+include Makefile
+include .coveragerc
+include .pylintrc
+include requirements.txt
+
+# Cython files -- include .pyx source, but not the generated .c files
+# (Downstream systems must have cython installed in order to build)
+recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
+exclude nilmdb/server/interval.c
+exclude nilmdb/server/rbtree.c
+
+# Version
+include nilmdb/_version.py
+
+# Tests
+recursive-include tests *.py
+recursive-include tests/data *
+include tests/test.order
+
+# Docs
+recursive-include docs Makefile *.md
+
+# Extras
+recursive-include extras *

Makefile (30 changes)

@@ -2,45 +2,49 @@
 all: test

 version:
-	python setup.py version
+	python3 setup.py version

 build:
-	python setup.py build_ext --inplace
+	python3 setup.py build_ext --inplace

 dist: sdist
 sdist:
-	python setup.py sdist
+	python3 setup.py sdist

 install:
-	python setup.py install
+	python3 setup.py install

 develop:
-	python setup.py develop
+	python3 setup.py develop

 docs:
 	make -C docs

+ctrl: flake
+flake:
+	flake8 nilmdb
 lint:
-	pylint --rcfile=.pylintrc nilmdb
+	pylint3 --rcfile=setup.cfg nilmdb

 test:
-ifeq ($(INSIDE_EMACS), t)
+ifneq ($(INSIDE_EMACS),)
 	# Use the slightly more flexible script
-	python setup.py build_ext --inplace
-	python tests/runtests.py
+	python3 setup.py build_ext --inplace
+	python3 tests/runtests.py
 else
 	# Let setup.py check dependencies, build stuff, and run the test
-	python setup.py nosetests
+	python3 setup.py nosetests
 endif

 clean::
-	find . -name '*pyc' | xargs rm -f
+	find . -name '*.pyc' -o -name '__pycache__' -print0 | xargs -0 rm -rf
 	rm -f .coverage
 	rm -rf tests/*testdb*
-	rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so MANIFEST.in
+	rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so
 	make -C docs clean

 gitclean::
 	git clean -dXf

-.PHONY: all version build dist sdist install docs lint test clean
+.PHONY: all version build dist sdist install docs test
+.PHONY: ctrl lint flake clean gitclean

README.md (new file, 33 lines)

@@ -0,0 +1,33 @@
+# nilmdb: Non-Intrusive Load Monitor Database
+by Jim Paris <jim@jtan.com>
+
+NilmDB requires Python 3.7 or newer.
+
+## Prerequisites:
+
+    # Runtime and build environments
+    sudo apt install python3.7 python3.7-dev python3.7-venv python3-pip
+
+    # Optional: create a new Python virtual environment to isolate
+    # dependencies. To leave the virtual environment, run "deactivate"
+    python -m venv venv
+    source venv/bin/activate
+
+    # Install all Python dependencies from PyPI.
+    pip3 install -r requirements.txt
+
+## Test:
+
+    python3 setup.py nosetests
+
+## Install:
+
+    sudo python3 setup.py install
+
+## Usage:
+
+    nilmdb-server --help
+    nilmdb-fsck --help
+    nilmtool --help
+
+See docs/wsgi.md for info on setting up a WSGI application in Apache.

README.txt (deleted, 26 lines)

@@ -1,26 +0,0 @@
-nilmdb: Non-Intrusive Load Monitor Database
-by Jim Paris <jim@jtan.com>
-
-Prerequisites:
-
-  # Runtime and build environments
-  sudo apt-get install python2.7 python2.7-dev python-setuptools cython
-
-  # Base NilmDB dependencies
-  sudo apt-get install python-cherrypy3 python-decorator python-simplejson
-  sudo apt-get install python-requests python-dateutil python-tz python-psutil
-
-  # Tools for running tests
-  sudo apt-get install python-nose python-coverage
-
-Test:
-  python setup.py nosetests
-
-Install:
-
-  python setup.py install
-
-Usage:
-
-  nilmdb-server --help
-  nilmtool --help

docs/design.md (141 changes)

@@ -140,7 +140,7 @@ Speed
 - Next slowdown target is nilmdb.layout.Parser.parse().
   - Rewrote parsers using cython and sscanf
-  - Stats (rev 10831), with _add_interval disabled
+  - Stats (rev 10831), with `_add_interval` disabled

       layout.pyx.Parser.parse:128 6303 sec, 262k calls
       layout.pyx.parse:63 13913 sec, 5.1g calls
@@ -186,6 +186,19 @@ IntervalSet speed
 - rbtree and interval converted to cython:
     8.4 μS, total 12 s, 134 MB RAM

+- Would like to move Interval itself back to Python so other
+  non-cythonized code like client code can use it more easily.
+  Testing speed with just `test_interval` being tested, with
+  `range(5,22)`, using `/usr/bin/time -v python tests/runtests.py`,
+  times recorded for 2097152:
+  - 52ae397 (Interval in cython):
+    12.6133 μs each, ratio 0.866533, total 47 sec, 399 MB RAM
+  - 9759dcf (Interval in python):
+    21.2937 μs each, ratio 1.462870, total 83 sec, 1107 MB RAM
+  That's a huge difference! Instead, will keep Interval and DBInterval
+  cythonized inside nilmdb, and just have an additional copy in
+  nilmdb.utils for clients to use.
+
 Layouts
 -------
 Current/old design has specific layouts: RawData, PrepData, RawNotchedData.
@@ -328,3 +341,129 @@ Current places where we use lines:
 - Finished. Just a single insert() that takes any length string and
   does very little processing until it's time to send it to the
   server.
+
+Timestamps
+----------
+
+Timestamps are currently double-precision floats (64 bit). Since the
+mantissa is 53-bit, this can only represent about 15-17 significant
+figures, and microsecond Unix timestamps like 1222333444.000111 are
+already 16 significant figures. Rounding is therefore an issue;
+it's hard to be sure that converting from ASCII, then back to ASCII,
+will always give the same result.
+
+Also, if the client provides a floating point value like 1.9999999999,
+we need to be careful that we don't store it as 1.9999999999 but later
+print it as 2.000000, because then round-trips change the data.
+
+Possible solutions:
+
+- When the client provides a floating point value to the server,
+  always round to the 6th decimal digit before verifying & storing.
+  Good for compatibility and simplicity. But still might have rounding
+  issues, and clients will also need to round when doing their own
+  verification. Having every piece of code need to know which digit
+  to round at is not ideal.
+
+- Always store int64 timestamps on the server, representing
+  microseconds since epoch. int64 timestamps are used in all HTTP
+  parameters, in insert/extract ASCII strings, client API, commandline
+  raw timestamps, etc. Pretty big change.
+
+  This is what we'll go with...
+
+  - Client programs that interpret the timestamps as doubles instead
+    of ints will remain accurate until 2^53 microseconds, or year
+    2255.
+
+  - On insert, maybe it's OK to send floating point microsecond values
+    (1234567890123456.0), just to cope with clients that want to print
+    everything as a double. Server could try parsing as int64, and if
+    that fails, parse as double and truncate to int64. However, this
+    wouldn't catch imprecise inputs like "1.23456789012e+15". But
+    maybe that can just be ignored; it's likely to cause a
+    non-monotonic error at the client.
+
+  - Timestamps like 1234567890.123456 never show up anywhere, except
+    for interfacing to datetime_tz etc. Command line "raw timestamps"
+    are always printed as int64 values, and a new format
+    "@1234567890123456" is added to the parser for specifying them
+    exactly.
+
+Binary interface
+----------------
+
+The ASCII interface is too slow for high-bandwidth processing, like
+sinefits, prep, etc. A binary interface was added so that you can
+extract the raw binary out of the bulkdata storage. This binary is
+a little-endian format, e.g. in C a uint16_6 stream would be:
+
+    #include <endian.h>
+    #include <stdint.h>
+    struct {
+        int64_t timestamp_le;
+        uint16_t data_le[6];
+    } __attribute__((packed));
+
+Remember to byteswap (with e.g. `letoh` in C)!
+
+This interface is used by the new `nilmdb.client.numpyclient.NumpyClient`
+class, which is a subclass of the normal `nilmdb.client.client.Client`
+and has all of the same functions. It adds three new functions:
+
+- `stream_extract_numpy` to extract data as a Numpy array
+
+- `stream_insert_numpy` to insert data as a Numpy array
+
+- `stream_insert_numpy_context` is the context manager for
+  incrementally inserting data
+
+It is significantly faster! It is about 20 times faster to decimate a
+stream with `nilm-decimate` when the filter code is using the new
+binary/numpy interface.
+
+
+WSGI interface & chunked requests
+---------------------------------
+
+mod_wsgi requires "WSGIChunkedRequest On" to handle
+"Transfer-encoding: Chunked" requests. However, `/stream/insert`
+doesn't handle this correctly right now, because:
+
+- The `cherrypy.request.body.read()` call needs to be fixed for chunked requests
+
+- We don't want to just buffer endlessly in the server, and it will
+  require some thought on how to handle data in chunks (what to do about
+  interval endpoints).
+
+It is probably better to just keep the endpoint management on the client
+side, so leave "WSGIChunkedRequest off" for now.
+
+
+Unicode & character encoding
+----------------------------
+
+Stream data is passed back and forth as raw `bytes` objects in most
+places, including the `nilmdb.client` and command-line interfaces.
+This is done partially for performance reasons, and partially to
+support the binary insert/extract options, where character-set encoding
+would not apply.
+
+For the HTTP server, the raw bytes transferred over HTTP are interpreted
+as follows:
+- For `/stream/insert`, the client-provided `Content-Type` is ignored,
+  and the data is read as if it were `application/octet-stream`.
+- For `/stream/extract`, the returned data is `application/octet-stream`.
+- All other endpoints communicate via JSON, which is specified to always
+  be encoded as UTF-8. This includes:
+  - `/version`
+  - `/dbinfo`
+  - `/stream/list`
+  - `/stream/create`
+  - `/stream/destroy`
+  - `/stream/rename`
+  - `/stream/get_metadata`
+  - `/stream/set_metadata`
+  - `/stream/update_metadata`
+  - `/stream/remove`
+  - `/stream/intervals`
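
A note on the Timestamps design added to docs/design.md above: the int64-microsecond convention avoids double-precision rounding in storage, but client code still has to convert carefully at the float boundary. A minimal Python sketch of the idea; the helper names here are illustrative, not part of nilmdb's actual API:

    # Illustrative only: nilmdb does not ship these helpers.
    def seconds_to_us(t):
        # Round float seconds to the nearest microsecond before storing,
        # so ASCII -> float -> int64 conversions stay stable.
        return int(round(t * 1e6))

    def us_to_raw(us):
        # "Raw timestamps" print as plain int64 microsecond counts; the
        # parser accepts them exactly via the "@..." format.
        return "@%d" % us

    # 1222333444.000111 has 16 significant figures and is fragile as a
    # double, but it is an exact integer number of microseconds:
    assert seconds_to_us(1222333444.000111) == 1222333444000111
    assert us_to_raw(1222333444000111) == "@1222333444000111"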

docs/wsgi.md (new file, 32 lines)

@@ -0,0 +1,32 @@
+WSGI Application in Apache
+--------------------------
+
+Install `apache2` and `libapache2-mod-wsgi`
+
+We'll set up the database server at URL `http://myhost.com/nilmdb`.
+The database will be stored in `/home/nilm/db`, and the process will
+run as user `nilm`, group `nilm`.
+
+First, create a WSGI script `/home/nilm/nilmdb.wsgi` containing:
+
+    import nilmdb.server
+    application = nilmdb.server.wsgi_application("/home/nilm/db", "/nilmdb")
+
+The first parameter is the local filesystem path, and the second
+parameter is the path part of the URL.
+
+Then, set up Apache with a configuration like:
+
+    <VirtualHost>
+        WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
+        WSGIDaemonProcess nilmdb-procgroup threads=32 user=nilm group=nilm
+        <Location /nilmdb>
+            WSGIProcessGroup nilmdb-procgroup
+            WSGIApplicationGroup nilmdb-appgroup
+
+            # Access control example:
+            Order deny,allow
+            Deny from all
+            Allow from 1.2.3.4
+        </Location>
+    </VirtualHost>
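
A deployment like the docs/wsgi.md example above can be sanity-checked by fetching one of the JSON (UTF-8) endpoints listed in docs/design.md. A sketch, assuming the example URL from that document; the exact response shape is whatever `/version` returns:

    import json
    import urllib.request

    # /version is one of the JSON endpoints enumerated in docs/design.md.
    with urllib.request.urlopen("http://myhost.com/nilmdb/version") as resp:
        print(json.loads(resp.read().decode("utf-8")))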

extras/fix-oversize-files.py (new file, 50 lines)

@@ -0,0 +1,50 @@
+#!/usr/bin/python
+
+import os
+import sys
+import pickle
+import argparse
+import fcntl
+import re
+from nilmdb.client.numpyclient import layout_to_dtype
+
+parser = argparse.ArgumentParser(
+    description = """
+Fix database corruption where binary writes caused too much data to be
+written to the file. Truncates files to the correct length. This was
+fixed by b98ff1331a515ad47fd3203615e835b529b039f9.
+""")
+parser.add_argument("path", action="store", help='Database root path')
+parser.add_argument("-y", "--yes", action="store_true", help='Fix them')
+args = parser.parse_args()
+
+lock = os.path.join(args.path, "data.lock")
+with open(lock, "w") as f:
+    fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+
+    fix = {}
+
+    for (path, dirs, files) in os.walk(args.path):
+        if "_format" in files:
+            with open(os.path.join(path, "_format")) as format:
+                fmt = pickle.load(format)
+                rowsize = layout_to_dtype(fmt["layout"]).itemsize
+                maxsize = rowsize * fmt["rows_per_file"]
+                fix[path] = maxsize
+                if maxsize < 128000000: # sanity check
+                    raise Exception("bad maxsize " + str(maxsize))
+
+    for fixpath in fix:
+        for (path, dirs, files) in os.walk(fixpath):
+            for fn in files:
+                if not re.match("^[0-9a-f]{4,}$", fn):
+                    continue
+                fn = os.path.join(path, fn)
+                size = os.path.getsize(fn)
+                maxsize = fix[fixpath]
+                if size > maxsize:
+                    diff = size - maxsize
+                    print(diff, "too big:", fn)
+                    if args.yes:
+                        with open(fn, "a+") as dbfile:
+                            dbfile.truncate(maxsize)
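
The script above sizes rows with `layout_to_dtype`, which is also the key to reading the little-endian binary format described in docs/design.md with numpy. A sketch of consuming such data; the explicit dtype below is an assumption that mirrors the C struct for a `uint16_6` layout (nilmdb's `layout_to_dtype` is the authoritative mapping), and `rawdata.bin` is a hypothetical file saved from a binary extract:

    import numpy as np

    # int64 little-endian timestamp followed by six uint16 values,
    # matching the packed C struct shown in docs/design.md.
    dtype = np.dtype([("timestamp", "<i8"), ("data", "<u2", 6)])

    rows = np.fromfile("rawdata.bin", dtype=dtype)
    print(rows["timestamp"][:5])
    print(rows["data"][:5])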

extras/nilmtool-bash-completion.sh (new file, 20 lines)

@@ -0,0 +1,20 @@
+# To enable bash completion:
+#
+# 1. Ensure python-argcomplete is installed:
+#      pip install argcomplete
+# 2. Source this file:
+#      . nilmtool-bash-completion.sh
+
+_nilmtool_argcomplete() {
+    local IFS=$(printf "\013")
+    COMPREPLY=( $(IFS="$IFS" \
+                  COMP_LINE="$COMP_LINE" \
+                  COMP_WORDBREAKS="$COMP_WORDBREAKS" \
+                  COMP_POINT="$COMP_POINT" \
+                  _ARGCOMPLETE=1 \
+                  "$1" 8>&1 9>&2 1>/dev/null 2>/dev/null) )
+    if [[ $? != 0 ]]; then
+        unset COMPREPLY
+    fi
+}
+complete -o nospace -F _nilmtool_argcomplete nilmtool

nilmdb/__init__.py

@@ -1,10 +1,5 @@
 """Main NilmDB import"""

-# These aren't imported automatically, because loading the server
-# stuff isn't always necessary.
-#from nilmdb.server import NilmDB, Server
-#from nilmdb.client import Client
-
-from nilmdb._version import get_versions
+from ._version import get_versions
 __version__ = get_versions()['version']
 del get_versions
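
The simplified import above doesn't change the package's behavior; the versioneer-provided version string is still exposed the same way:

    # Quick check that the version plumbing still works after the change:
    import nilmdb
    print(nilmdb.__version__)   # e.g. "2.0" for a release built from the tag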

nilmdb/_version.py

@@ -1,197 +1,520 @@ (versioneer upgraded from 0.7+ to 0.18; diff truncated below)

-IN_LONG_VERSION_PY = True
 # This file helps to compute a version number in source trees obtained from
 # git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (build by setup.py sdist) and build
+# feature). Distribution tarballs (built by setup.py sdist) and build
 # directories (produced by setup.py build) will contain a much shorter file
 # that just contains the computed version number.

 # This file is released into the public domain. Generated by
-# versioneer-0.7+ (https://github.com/warner/python-versioneer)
+# versioneer-0.18 (https://github.com/warner/python-versioneer)

-# these strings will be replaced by git during git-archive
-git_refnames = "$Format:%d$"
-git_full = "$Format:%H$"

+"""Git implementation of _version.py."""
+
+import errno
+import os
+import re
 import subprocess
 import sys

-def run_command(args, cwd=None, verbose=False):
-    try:
-        # remember shell=False, so use git.cmd on windows, not just git
-        p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
-    except EnvironmentError:
-        e = sys.exc_info()[1]
+
+def get_keywords():
+    """Get the keywords needed to look up the version information."""
+    # these strings will be replaced by git during git-archive.
+    # setup.py/versioneer.py will grep for the variable names, so they must
+    # each be defined on a line of their own. _version.py will just call
+    # get_keywords().
+    git_refnames = "$Format:%d$"
+    git_full = "$Format:%H$"
+    git_date = "$Format:%ci$"
+    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
+    return keywords
+
+
+class VersioneerConfig:
+    """Container for Versioneer configuration parameters."""
+
+
+def get_config():
+    """Create, populate and return the VersioneerConfig() object."""
+    # these strings are filled in when 'setup.py versioneer' creates
+    # _version.py
+    cfg = VersioneerConfig()
+    cfg.VCS = "git"
+    cfg.style = "pep440"
+    cfg.tag_prefix = "nilmdb-"
+    cfg.parentdir_prefix = "nilmdb-"
+    cfg.versionfile_source = "nilmdb/_version.py"
+    cfg.verbose = False
+    return cfg
+
+
+class NotThisMethod(Exception):
+    """Exception raised if a method is not valid for the current scenario."""
+
+
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method):  # decorator
+    """Decorator to mark a method as the handler for a particular VCS."""
+    def decorate(f):
+        """Store f in HANDLERS[vcs][method]."""
+        if vcs not in HANDLERS:
+            HANDLERS[vcs] = {}
+        HANDLERS[vcs][method] = f
+        return f
+    return decorate
+
+
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
+                env=None):
+    """Call the given command(s)."""
+    assert isinstance(commands, list)
+    p = None
+    for c in commands:
+        try:
+            dispcmd = str([c] + args)
+            # remember shell=False, so use git.cmd on windows, not just git
+            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
+                                 stdout=subprocess.PIPE,
+                                 stderr=(subprocess.PIPE if hide_stderr
+                                         else None))
+            break
+        except EnvironmentError:
+            e = sys.exc_info()[1]
+            if e.errno == errno.ENOENT:
+                continue
+            if verbose:
+                print("unable to run %s" % dispcmd)
+                print(e)
+            return None, None
+    else:
         if verbose:
-            print("unable to run %s" % args[0])
-            print(e)
-        return None
+            print("unable to find command, tried %s" % (commands,))
+        return None, None
     stdout = p.communicate()[0].strip()
-    if sys.version >= '3':
+    if sys.version_info[0] >= 3:
         stdout = stdout.decode()
     if p.returncode != 0:
         if verbose:
-            print("unable to run %s (error)" % args[0])
-        return None
-    return stdout
+            print("unable to run %s (error)" % dispcmd)
+            print("stdout was %s" % stdout)
+        return None, p.returncode
+    return stdout, p.returncode


-import sys
-import re
-import os.path
+def versions_from_parentdir(parentdir_prefix, root, verbose):
+    """Try to determine the version from the parent directory name.

-def get_expanded_variables(versionfile_source):
+    Source tarballs conventionally unpack into a directory that includes both
+    the project name and a version string. We will also support searching up
+    two directory levels for an appropriately named parent directory
+    """
+    rootdirs = []
+
+    for i in range(3):
+        dirname = os.path.basename(root)
+        if dirname.startswith(parentdir_prefix):
+            return {"version": dirname[len(parentdir_prefix):],
+                    "full-revisionid": None,
+                    "dirty": False, "error": None, "date": None}
+        else:
+            rootdirs.append(root)
+            root = os.path.dirname(root)  # up a level
+
+    if verbose:
+        print("Tried directories %s but none started with prefix %s" %
+              (str(rootdirs), parentdir_prefix))
+    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+
+
+@register_vcs_handler("git", "get_keywords")
+def git_get_keywords(versionfile_abs):
+    """Extract version information from the given file."""
     # the code embedded in _version.py can just fetch the value of these
-    # variables. When used from setup.py, we don't want to import
-    # _version.py, so we do it with a regexp instead. This function is not
-    # used from _version.py.
-    variables = {}
+    # keywords. When used from setup.py, we don't want to import _version.py,
+    # so we do it with a regexp instead. This function is not used from
+    # _version.py.
+    keywords = {}
     try:
-        for line in open(versionfile_source,"r").readlines():
+        f = open(versionfile_abs, "r")
+        for line in f.readlines():
             if line.strip().startswith("git_refnames ="):
                 mo = re.search(r'=\s*"(.*)"', line)
                 if mo:
-                    variables["refnames"] = mo.group(1)
+                    keywords["refnames"] = mo.group(1)
             if line.strip().startswith("git_full ="):
                 mo = re.search(r'=\s*"(.*)"', line)
                 if mo:
-                    variables["full"] = mo.group(1)
+                    keywords["full"] = mo.group(1)
+            if line.strip().startswith("git_date ="):
+                mo = re.search(r'=\s*"(.*)"', line)
+                if mo:
+                    keywords["date"] = mo.group(1)
+        f.close()
     except EnvironmentError:
         pass
-    return variables
+    return keywords

-def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
-    refnames = variables["refnames"].strip()
+
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+    """Get version information from git keywords."""
+    if not keywords:
+        raise NotThisMethod("no keywords at all, weird")
+    date = keywords.get("date")
+    if date is not None:
+        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
+        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
+        # -like" string, which we must then edit to make compliant), because
+        # it's been around since git-1.5.3, and it's too difficult to
+        # discover which version we're using, or to work around using an
+        # older one.
+        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+    refnames = keywords["refnames"].strip()
     if refnames.startswith("$Format"):
         if verbose:
-            print("variables are unexpanded, not using")
-        return {} # unexpanded, so not in an unpacked git-archive tarball
+            print("keywords are unexpanded, not using")
+        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
     refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    for ref in list(refs):
-        if not re.search(r'\d', ref):
-            if verbose:
-                print("discarding '%s', no digits" % ref)
-            refs.discard(ref)
-            # Assume all version tags have a digit. git's %d expansion
-            # behaves like git log --decorate=short and strips out the
-            # refs/heads/ and refs/tags/ prefixes that would let us
-            # distinguish between branches and tags. By ignoring refnames
-            # without digits, we filter out many common branch names like
-            # "release" and "stabilization", as well as "HEAD" and "master".
+    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+    TAG = "tag: "
+    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+    if not tags:
+        # Either we're using git < 1.8.3, or there really are no tags. We use
+        # a heuristic: assume all version tags have a digit. The old git %d
+        # expansion behaves like git log --decorate=short and strips out the
+        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+        # between branches and tags. By ignoring refnames without digits, we
+        # filter out many common branch names like "release" and
+        # "stabilization", as well as "HEAD" and "master".
+        tags = set([r for r in refs if re.search(r'\d', r)])
+        if verbose:
+            print("discarding '%s', no digits" % ",".join(refs - tags))
     if verbose:
-        print("remaining refs: %s" % ",".join(sorted(refs)))
-    for ref in sorted(refs):
+        print("likely tags: %s" % ",".join(sorted(tags)))
+    for ref in sorted(tags):
         # sorting will prefer e.g. "2.0" over "2.0rc1"
         if ref.startswith(tag_prefix):
             r = ref[len(tag_prefix):]
             if verbose:
                 print("picking %s" % r)
-            return { "version": r,
-                     "full": variables["full"].strip() }
-    # no suitable tags, so we use the full revision id
+            return {"version": r,
+                    "full-revisionid": keywords["full"].strip(),
+                    "dirty": False, "error": None,
+                    "date": date}
+    # no suitable tags, so version is "0+unknown", but full hex is still there
     if verbose:
-        print("no suitable tags, using full revision id")
-    return { "version": variables["full"].strip(),
-             "full": variables["full"].strip() }
+        print("no suitable tags, using unknown + full revision id")
+    return {"version": "0+unknown",
+            "full-revisionid": keywords["full"].strip(),
+            "dirty": False, "error": "no suitable tags", "date": None}

-def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
-    # this runs 'git' from the root of the source tree. That either means
-    # someone ran a setup.py command (and this code is in versioneer.py, so
-    # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
-    # the source tree), or someone ran a project-specific entry point (and
-    # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
-    # containing directory is somewhere deeper in the source tree). This only
-    # gets called if the git-archive 'subst' variables were *not* expanded,
-    # and _version.py hasn't already been rewritten with a short version
-    # string, meaning we're inside a checked out source tree.
+
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+    """Get version from 'git describe' in the root of the source tree.
+
+    This only gets called if the git-archive 'subst' keywords were *not*
+    expanded, and _version.py hasn't already been rewritten with a short
+    version string, meaning we're inside a checked out source tree.
+    """
+    GITS = ["git"]
+    if sys.platform == "win32":
+        GITS = ["git.cmd", "git.exe"]
+
+    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
+                          hide_stderr=True)
+    if rc != 0:
+        if verbose:
+            print("Directory %s not under git control" % root)
+        raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+    # if there isn't one, this yields HEX[-dirty] (no NUM)
+    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
+                                          "--always", "--long",
+                                          "--match", "%s*" % tag_prefix],
+                                   cwd=root)
+    # --long was added in git-1.5.5
+    if describe_out is None:
+        raise NotThisMethod("'git describe' failed")
+    describe_out = describe_out.strip()
+    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+    if full_out is None:
+        raise NotThisMethod("'git rev-parse' failed")
+    full_out = full_out.strip()
+
+    pieces = {}
+    pieces["long"] = full_out
+    pieces["short"] = full_out[:7]  # maybe improved later
+    pieces["error"] = None
+
+    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+    # TAG might have hyphens.
+    git_describe = describe_out
+
+    # look for -dirty suffix
+    dirty = git_describe.endswith("-dirty")
+    pieces["dirty"] = dirty
+    if dirty:
+        git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+    # now we have TAG-NUM-gHEX or HEX
+
+    if "-" in git_describe:
+        # TAG-NUM-gHEX
+        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+        if not mo:
+            # unparseable. Maybe git-describe is misbehaving?
+            pieces["error"] = ("unable to parse git-describe output: '%s'"
+                               % describe_out)
+            return pieces
+
+        # tag
+        full_tag = mo.group(1)
+        if not full_tag.startswith(tag_prefix):
+            if verbose:
+                fmt = "tag '%s' doesn't start with prefix '%s'"
+                print(fmt % (full_tag, tag_prefix))
+            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
+                               % (full_tag, tag_prefix))
+            return pieces
+        pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+        # distance: number of commits since tag
+        pieces["distance"] = int(mo.group(2))
+
+        # commit: short hex revision ID
+        pieces["short"] = mo.group(3)
+
+    else:
+        # HEX: no tags
+        pieces["closest-tag"] = None
+        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
+                                    cwd=root)
+        pieces["distance"] = int(count_out)  # total number of commits
+
+    # commit date: see ISO-8601 comment in git_versions_from_keywords()
+    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
+                       cwd=root)[0].strip()
+    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+    return pieces
+
+
+def plus_or_dot(pieces):
+    """Return a + if we don't already have one, else return a ."""
+    if "+" in pieces.get("closest-tag", ""):
+        return "."
+    return "+"
+
+
+def render_pep440(pieces):
+    """Build up version string, with post-release "local version identifier".
+
+    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+    Exceptions:
+    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%d" % pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Eexceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always -long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render(pieces, style):
+    """Render the given version pieces into the requested style."""
+    if pieces["error"]:
+        return {"version": "unknown",
+                "full-revisionid": pieces.get("long"),
+                "dirty": None,
+                "error": pieces["error"],
+                "date": None}
+
+    if not style or style == "default":
+        style = "pep440"  # the default
+
+    if style == "pep440":
+        rendered = render_pep440(pieces)
+    elif style == "pep440-pre":
+        rendered = render_pep440_pre(pieces)
+    elif style == "pep440-post":
+        rendered = render_pep440_post(pieces)
+    elif style == "pep440-old":
+        rendered = render_pep440_old(pieces)
+    elif style == "git-describe":
+        rendered = render_git_describe(pieces)
+    elif style == "git-describe-long":
+        rendered = render_git_describe_long(pieces)
+    else:
+        raise ValueError("unknown style '%s'" % style)
+
+    return {"version": rendered, "full-revisionid": pieces["long"],
+            "dirty": pieces["dirty"], "error": None,
+            "date": pieces.get("date")}
+
+
+def get_versions():
+    """Get version information or return default if unable to do so."""
+    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+    # __file__, we can work backwards from there to the root. Some
+    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+    # case we can only use expanded keywords.
+
+    cfg = get_config()
+    verbose = cfg.verbose

     try:
-        here = os.path.abspath(__file__)
-    except NameError:
-        # some py2exe/bbfreeze/non-CPython implementations don't do __file__
-        return {} # not always correct
+        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
+                                          verbose)
+    except NotThisMethod:
+        pass

-    # versionfile_source is the relative path from the top of the source tree
-    # (where the .git directory might live) to this file. Invert this to find
-    # the root from __file__.
-    root = here
-    if IN_LONG_VERSION_PY:
-        for i in range(len(versionfile_source.split("/"))):
-            root = os.path.dirname(root)
-    else:
-        root = os.path.dirname(here)
-    if not os.path.exists(os.path.join(root, ".git")):
-        if verbose:
-            print("no .git in %s" % root)
-        return {}
-
-    GIT = "git"
-    if sys.platform == "win32":
-        GIT = "git.cmd"
-    stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
-                         cwd=root)
-    if stdout is None:
-        return {}
-    if not stdout.startswith(tag_prefix):
-        if verbose:
-            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
-        return {}
-    tag = stdout[len(tag_prefix):]
-    stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
-    if stdout is None:
-        return {}
-    full = stdout.strip()
-    if tag.endswith("-dirty"):
-        full += "-dirty"
-    return {"version": tag, "full": full}
-
-
-def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
-    if IN_LONG_VERSION_PY:
-        # We're running from _version.py. If it's from a source tree
-        # (execute-in-place), we can work upwards to find the root of the
+    try:
+        root = os.path.realpath(__file__)
|
|
||||||
# tree, and then check the parent directory for a version string. If
|
|
||||||
# it's in an installed application, there's no hope.
|
|
||||||
try:
|
|
||||||
here = os.path.abspath(__file__)
|
|
||||||
except NameError:
|
|
||||||
# py2exe/bbfreeze/non-CPython don't have __file__
|
|
||||||
return {} # without __file__, we have no hope
|
|
||||||
# versionfile_source is the relative path from the top of the source
|
# versionfile_source is the relative path from the top of the source
|
||||||
# tree to _version.py. Invert this to find the root from __file__.
|
# tree (where the .git directory might live) to this file. Invert
|
||||||
root = here
|
# this to find the root from __file__.
|
||||||
for i in range(len(versionfile_source.split("/"))):
|
for i in cfg.versionfile_source.split('/'):
|
||||||
root = os.path.dirname(root)
|
root = os.path.dirname(root)
|
||||||
else:
|
except NameError:
|
||||||
# we're running from versioneer.py, which means we're running from
|
return {"version": "0+unknown", "full-revisionid": None,
|
||||||
# the setup.py in a source tree. sys.argv[0] is setup.py in the root.
|
"dirty": None,
|
||||||
here = os.path.abspath(sys.argv[0])
|
"error": "unable to find root of source tree",
|
||||||
root = os.path.dirname(here)
|
"date": None}
|
||||||
|
|
||||||
# Source tarballs conventionally unpack into a directory that includes
|
try:
|
||||||
# both the project name and a version string.
|
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
|
||||||
dirname = os.path.basename(root)
|
return render(pieces, cfg.style)
|
||||||
if not dirname.startswith(parentdir_prefix):
|
except NotThisMethod:
|
||||||
if verbose:
|
pass
|
||||||
print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
|
|
||||||
(root, dirname, parentdir_prefix))
|
|
||||||
return None
|
|
||||||
return {"version": dirname[len(parentdir_prefix):], "full": ""}
|
|
||||||
|
|
||||||
tag_prefix = "nilmdb-"
|
try:
|
||||||
parentdir_prefix = "nilmdb-"
|
if cfg.parentdir_prefix:
|
||||||
versionfile_source = "nilmdb/_version.py"
|
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
|
||||||
|
except NotThisMethod:
|
||||||
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
|
pass
|
||||||
variables = { "refnames": git_refnames, "full": git_full }
|
|
||||||
ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
|
|
||||||
if not ver:
|
|
||||||
ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
|
|
||||||
if not ver:
|
|
||||||
ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
|
|
||||||
verbose)
|
|
||||||
if not ver:
|
|
||||||
ver = default
|
|
||||||
return ver
|
|
||||||
|
|
||||||
|
return {"version": "0+unknown", "full-revisionid": None,
|
||||||
|
"dirty": None,
|
||||||
|
"error": "unable to compute version", "date": None}
|
||||||
|
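For orientation, a hypothetical illustration (not part of the commit) of what these renderers produce. The pieces dict and hash below are made up; in real use they come from git_pieces_from_vcs(), which has already stripped the "nilmdb-" tag prefix from closest-tag.

    # Assumed inputs -- the hash is a stand-in, not a real commit.
    pieces = {"closest-tag": "2.0", "distance": 3, "short": "abc1234",
              "long": "abc1234" + "0" * 33, "dirty": True,
              "error": None, "date": None}

    # render(pieces, style)["version"] would then be roughly:
    #   pep440:            2.0+3.gabc1234.dirty
    #   pep440-post:       2.0.post3.dev0+gabc1234
    #   pep440-old:        2.0.post3.dev0
    #   git-describe:      2.0-3-gabc1234-dirty
    #   git-describe-long: 2.0-3-gabc1234-dirty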
nilmdb/client/client.py
@@ -2,24 +2,24 @@
 """Class for performing HTTP client requests via libcurl"""

+import json
+import contextlib
+
 import nilmdb.utils
 import nilmdb.client.httpclient
 from nilmdb.client.errors import ClientError
-
-import time
-import simplejson as json
-import contextlib
-
-from nilmdb.utils.time import float_time_to_string
+from nilmdb.utils.time import timestamp_to_string, string_to_timestamp

 def extract_timestamp(line):
     """Extract just the timestamp from a line of data text"""
-    return float(line.split()[0])
+    return string_to_timestamp(line.split()[0])

-class Client(object):
+
+class Client():
     """Main client interface to the Nilm database."""

-    def __init__(self, url, post_json = False):
+    def __init__(self, url, post_json=False):
         """Initialize client with given URL.  If post_json is true,
         POST requests are sent with Content-Type 'application/json'
         instead of the default 'x-www-form-urlencoded'."""
@@ -38,7 +38,7 @@ class Client(object):
         if self.post_json:
             # If we're posting as JSON, we don't need to encode it further here
             return data
-        return json.dumps(data, separators=(',',':'))
+        return json.dumps(data, separators=(',', ':'))

     def close(self):
         """Close the connection; safe to call multiple times"""
@@ -57,7 +57,12 @@ class Client(object):
         as a dictionary."""
         return self.http.get("dbinfo")

-    def stream_list(self, path = None, layout = None, extended = False):
+    def stream_list(self, path=None, layout=None, extended=False):
+        """Return a sorted list of [path, layout] lists.  If 'path' or
+        'layout' are specified, only return streams that match those
+        exact values.  If 'extended' is True, the returned lists have
+        extended info, e.g.: [path, layout, extent_min, extent_max,
+        total_rows, total_seconds]."""
         params = {}
         if path is not None:
             params["path"] = path
@@ -65,10 +70,12 @@ class Client(object):
             params["layout"] = layout
         if extended:
             params["extended"] = 1
-        return self.http.get("stream/list", params)
+        streams = self.http.get("stream/list", params)
+        return nilmdb.utils.sort.sort_human(streams, key=lambda s: s[0])

-    def stream_get_metadata(self, path, keys = None):
-        params = { "path": path }
+    def stream_get_metadata(self, path, keys=None):
+        """Get stream metadata"""
+        params = {"path": path}
         if keys is not None:
             params["key"] = keys
         return self.http.get("stream/get_metadata", params)
@@ -79,7 +86,7 @@ class Client(object):
         params = {
             "path": path,
             "data": self._json_post_param(data)
-            }
+        }
         return self.http.post("stream/set_metadata", params)

     def stream_update_metadata(self, path, data):
@@ -87,38 +94,54 @@ class Client(object):
         params = {
             "path": path,
             "data": self._json_post_param(data)
         }
         return self.http.post("stream/update_metadata", params)

     def stream_create(self, path, layout):
         """Create a new stream"""
-        params = { "path": path,
-                   "layout" : layout }
+        params = {
+            "path": path,
+            "layout": layout
+        }
         return self.http.post("stream/create", params)

     def stream_destroy(self, path):
-        """Delete stream and its contents"""
-        params = { "path": path }
+        """Delete stream.  Fails if any data is still present."""
+        params = {
+            "path": path
+        }
         return self.http.post("stream/destroy", params)

-    def stream_remove(self, path, start = None, end = None):
+    def stream_rename(self, oldpath, newpath):
+        """Rename a stream."""
+        params = {
+            "oldpath": oldpath,
+            "newpath": newpath
+        }
+        return self.http.post("stream/rename", params)
+
+    def stream_remove(self, path, start=None, end=None):
         """Remove data from the specified time range"""
         params = {
             "path": path
         }
         if start is not None:
-            params["start"] = float_time_to_string(start)
+            params["start"] = timestamp_to_string(start)
         if end is not None:
-            params["end"] = float_time_to_string(end)
-        return self.http.post("stream/remove", params)
+            params["end"] = timestamp_to_string(end)
+        total = 0
+        for count in self.http.post_gen("stream/remove", params):
+            total += int(count)
+        return total

     @contextlib.contextmanager
-    def stream_insert_context(self, path, start = None, end = None):
+    def stream_insert_context(self, path, start=None, end=None):
         """Return a context manager that allows data to be efficiently
-        inserted into a stream in a piecewise manner. Data is be provided
-        as single lines, and is aggregated and sent to the server in larger
-        chunks as necessary. Data lines must match the database layout for
-        the given path, and end with a newline.
+        inserted into a stream in a piecewise manner.  Data is
+        provided as ASCII lines, and is aggregated and sent to the
+        server in larger or smaller chunks as necessary.  Data lines
+        must match the database layout for the given path, and end
+        with a newline.

         Example:
             with client.stream_insert_context('/path', start, end) as ctx:
@@ -130,65 +153,103 @@ class Client(object):
         This may make multiple requests to the server, if the data is
         large enough or enough time has passed between insertions.
         """
-        ctx = StreamInserter(self.http, path, start, end)
+        ctx = StreamInserter(self, path, start, end)
         yield ctx
         ctx.finalize()
+        ctx.destroy()

-    def stream_insert(self, path, data, start = None, end = None):
+    def stream_insert(self, path, data, start=None, end=None):
         """Insert rows of data into a stream.  data should be a string
         or iterable that provides ASCII data that matches the database
-        layout for path.  See stream_insert_context for details on the
-        'start' and 'end' parameters."""
+        layout for path.  Data is passed through stream_insert_context,
+        so it will be broken into reasonably-sized chunks and
+        start/end will be deduced if missing."""
         with self.stream_insert_context(path, start, end) as ctx:
-            if isinstance(data, basestring):
+            if isinstance(data, bytes):
                 ctx.insert(data)
             else:
                 for chunk in data:
                     ctx.insert(chunk)
         return ctx.last_response

-    def stream_intervals(self, path, start = None, end = None):
+    def stream_insert_block(self, path, data, start, end, binary=False):
+        """Insert a single fixed block of data into the stream.  It is
+        sent directly to the server in one block with no further
+        processing.
+
+        If 'binary' is True, provide raw binary data in little-endian
+        format matching the path layout, including an int64 timestamp.
+        Otherwise, provide ASCII data matching the layout."""
+        params = {
+            "path": path,
+            "start": timestamp_to_string(start),
+            "end": timestamp_to_string(end),
+        }
+        if binary:
+            params["binary"] = 1
+        return self.http.put("stream/insert", data, params)
+
+    def stream_intervals(self, path, start=None, end=None, diffpath=None):
         """
         Return a generator that yields each stream interval.
+
+        If 'diffpath' is not None, yields only interval ranges that are
+        present in 'path' but not in 'diffpath'.
         """
         params = {
             "path": path
         }
+        if diffpath is not None:
+            params["diffpath"] = diffpath
         if start is not None:
-            params["start"] = float_time_to_string(start)
+            params["start"] = timestamp_to_string(start)
         if end is not None:
-            params["end"] = float_time_to_string(end)
+            params["end"] = timestamp_to_string(end)
         return self.http.get_gen("stream/intervals", params)

-    def stream_extract(self, path, start = None, end = None, count = False):
+    def stream_extract(self, path, start=None, end=None,
+                       count=False, markup=False, binary=False):
         """
         Extract data from a stream.  Returns a generator that yields
         lines of ASCII-formatted data that matches the database
         layout for the given path.

-        Specify count = True to return a count of matching data points
+        If 'count' is True, return a count of matching data points
         rather than the actual data.  The output format is unchanged.
+
+        If 'markup' is True, include comments in the returned data
+        that indicate interval starts and ends.
+
+        If 'binary' is True, return chunks of raw binary data, rather
+        than lines of ASCII-formatted data.  Raw binary data is
+        little-endian and matches the database types (including an
+        int64 timestamp).
         """
         params = {
             "path": path,
         }
         if start is not None:
-            params["start"] = float_time_to_string(start)
+            params["start"] = timestamp_to_string(start)
         if end is not None:
-            params["end"] = float_time_to_string(end)
+            params["end"] = timestamp_to_string(end)
         if count:
             params["count"] = 1
-        return self.http.get_gen("stream/extract", params)
+        if markup:
+            params["markup"] = 1
+        if binary:
+            params["binary"] = 1
+        return self.http.get_gen("stream/extract", params, binary=binary)

-    def stream_count(self, path, start = None, end = None):
+    def stream_count(self, path, start=None, end=None):
         """
         Return the number of rows of data in the stream that satisfy
         the given timestamps.
         """
-        counts = list(self.stream_extract(path, start, end, count = True))
+        counts = list(self.stream_extract(path, start, end, count=True))
         return int(counts[0])

-class StreamInserter(object):
+
+class StreamInserter():
     """Object returned by stream_insert_context() that manages
     the insertion of rows of data into a particular path.

@@ -225,17 +286,15 @@ class StreamInserter(object):
     # See design.md for a discussion of how much data to send.  This
     # is a soft limit -- we might send up to twice as much or so
     _max_data = 2 * 1024 * 1024
+    _max_data_after_send = 64 * 1024

-    # Delta to add to the final timestamp, if "end" wasn't given
-    _end_epsilon = 1e-6
-
-    def __init__(self, http, path, start = None, end = None):
-        """'http' is the httpclient object.  'path' is the database
+    def __init__(self, client, path, start, end):
+        """'client' is the client object.  'path' is the database
         path to insert to.  'start' and 'end' are used for the first
-        contiguous interval."""
+        contiguous interval and may be None."""
         self.last_response = None

-        self._http = http
+        self._client = client
         self._path = path

         # Start and end for the overall contiguous interval we're
@@ -248,6 +307,15 @@ class StreamInserter(object):
         self._block_data = []
         self._block_len = 0

+        self.destroyed = False
+
+    def destroy(self):
+        """Ensure this object can't be used again without raising
+        an error"""
+        def error(*args, **kwargs):
+            raise Exception("don't reuse this context object")
+        self._send_block = self.insert = self.finalize = self.send = error
+
     def insert(self, data):
         """Insert a chunk of ASCII formatted data in string form.  The
         overall data must consist of lines terminated by '\\n'."""
@@ -269,7 +337,11 @@ class StreamInserter(object):

         # Send the block once we have enough data
         if self._block_len >= maxdata:
-            self._send_block(final = False)
+            self._send_block(final=False)
+            if self._block_len >= self._max_data_after_send:
+                raise ValueError("too much data left over after trying"
+                                 " to send intermediate block; is it"
+                                 " missing newlines or malformed?")

     def update_start(self, start):
         """Update the start time for the next contiguous interval.
@@ -292,7 +364,12 @@ class StreamInserter(object):

         If more data is inserted after a finalize(), it will become
         part of a new interval and there may be a gap left in-between."""
-        self._send_block(final = True)
+        self._send_block(final=True)
+
+    def send(self):
+        """Send any data that we might have buffered up.  Does not affect
+        any other treatment of timestamps or endpoints."""
+        self._send_block(final=False)

     def _get_first_noncomment(self, block):
         """Return the (start, end) indices of the first full line in
@@ -300,10 +377,10 @@ class StreamInserter(object):
         there isn't one."""
         start = 0
         while True:
-            end = block.find('\n', start)
+            end = block.find(b'\n', start)
             if end < 0:
                 raise IndexError
-            if block[start] != '#':
+            if block[start] != b'#'[0]:
                 return (start, (end + 1))
             start = end + 1

@@ -311,22 +388,22 @@ class StreamInserter(object):
         """Return the (start, end) indices of the last full line in
         block[:length] that isn't a comment, or raise IndexError if
         there isn't one."""
-        end = block.rfind('\n')
+        end = block.rfind(b'\n')
         if end <= 0:
             raise IndexError
         while True:
-            start = block.rfind('\n', 0, end)
-            if block[start + 1] != '#':
+            start = block.rfind(b'\n', 0, end)
+            if block[start + 1] != b'#'[0]:
                 return ((start + 1), end)
             if start == -1:
                 raise IndexError
             end = start

-    def _send_block(self, final = False):
+    def _send_block(self, final=False):
         """Send data currently in the block.  The data sent will
         consist of full lines only, so some might be left over."""
         # Build the full string to send
-        block = "".join(self._block_data)
+        block = b"".join(self._block_data)

         start_ts = self._interval_start
         if start_ts is None:
@@ -335,7 +412,7 @@ class StreamInserter(object):
                 (spos, epos) = self._get_first_noncomment(block)
                 start_ts = extract_timestamp(block[spos:epos])
             except (ValueError, IndexError):
-                pass # no timestamp is OK, if we have no data
+                pass  # no timestamp is OK, if we have no data

         if final:
             # For a final block, it must end in a newline, and the
@@ -343,14 +420,14 @@ class StreamInserter(object):
             # or the timestamp of the last line plus epsilon.
             end_ts = self._interval_end
             try:
-                if block[-1] != '\n':
+                if block[-1] != b'\n'[0]:
                     raise ValueError("final block didn't end with a newline")
                 if end_ts is None:
                     (spos, epos) = self._get_last_noncomment(block)
                     end_ts = extract_timestamp(block[spos:epos])
-                    end_ts += self._end_epsilon
+                    end_ts += nilmdb.utils.time.epsilon
             except (ValueError, IndexError):
-                pass # no timestamp is OK, if we have no data
+                pass  # no timestamp is OK, if we have no data
             self._block_data = []
             self._block_len = 0

@@ -366,7 +443,7 @@ class StreamInserter(object):
                 (spos, epos) = self._get_last_noncomment(block)
                 end_ts = extract_timestamp(block[spos:epos])
             except (ValueError, IndexError):
-                # If we found no timestamp, give up; we'll send this
+                # If we found no timestamp, give up; we could send this
                 # block later when we have more data.
                 return
             if spos == 0:
@@ -377,7 +454,7 @@ class StreamInserter(object):
                 # the server complain so that the error is the same
                 # as if we hadn't done this chunking.
                 end_ts = self._interval_end
-            self._block_data = [ block[spos:] ]
+            self._block_data = [block[spos:]]
             self._block_len = (epos - spos)
             block = block[:spos]

@@ -385,7 +462,7 @@ class StreamInserter(object):
             self._interval_start = end_ts

         # Double check endpoints
-        if start_ts is None or end_ts is None:
+        if (start_ts is None or end_ts is None) or (start_ts == end_ts):
             # If the block has no non-comment lines, it's OK
             try:
                 self._get_first_noncomment(block)
@@ -394,7 +471,7 @@ class StreamInserter(object):
             raise ClientError("have data to send, but no start/end times")

         # Send it
-        params = { "path": self._path,
-                   "start": float_time_to_string(start_ts),
-                   "end": float_time_to_string(end_ts) }
-        self.last_response = self._http.put("stream/insert", block, params)
+        self.last_response = self._client.stream_insert_block(
+            self._path, block, start_ts, end_ts, binary=False)
+
+        return
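Taken together, these changes make the client API bytes-oriented with int64 microsecond timestamps. A rough usage sketch (not from the commit; the URL, path, and values are made up):

    import nilmdb.client

    client = nilmdb.client.Client("http://localhost/nilmdb/")
    client.stream_create("/demo/sensor", "float32_3")

    # Lines are bytes; start/end are deduced from the data if omitted.
    with client.stream_insert_context("/demo/sensor") as ctx:
        ctx.insert(b"1000000 1.0 2.0 3.0\n")
        ctx.insert(b"2000000 1.5 2.5 3.5\n")

    for line in client.stream_extract("/demo/sensor"):
        print(line.decode())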
nilmdb/client/errors.py
@@ -1,33 +1,41 @@
 """HTTP client errors"""

-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import sprintf
+

 class Error(Exception):
     """Base exception for both ClientError and ServerError responses"""
     def __init__(self,
-                 status = "Unspecified error",
-                 message = None,
-                 url = None,
-                 traceback = None):
-        Exception.__init__(self, status)
+                 status="Unspecified error",
+                 message=None,
+                 url=None,
+                 traceback=None):
+        super().__init__(status)
         self.status = status        # e.g. "400 Bad Request"
         self.message = message      # textual message from the server
         self.url = url              # URL we were requesting
         self.traceback = traceback  # server traceback, if available

     def _format_error(self, show_url):
         s = sprintf("[%s]", self.status)
         if self.message:
             s += sprintf(" %s", self.message)
-        if show_url and self.url: # pragma: no cover
+        if show_url and self.url:
             s += sprintf(" (%s)", self.url)
-        if self.traceback: # pragma: no cover
+        if self.traceback:
             s += sprintf("\nServer traceback:\n%s", self.traceback)
         return s

     def __str__(self):
-        return self._format_error(show_url = False)
-    def __repr__(self): # pragma: no cover
-        return self._format_error(show_url = True)
+        return self._format_error(show_url=False)
+
+    def __repr__(self):
+        return self._format_error(show_url=True)
+

 class ClientError(Error):
     pass
+

 class ServerError(Error):
     pass
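A small sketch of how callers distinguish the two error classes; per _handle_error below, 4xx responses raise ClientError and 5xx raise ServerError. The bad layout string here is only a stand-in for a request the server would reject:

    import nilmdb.client
    from nilmdb.client.errors import ClientError, ServerError

    client = nilmdb.client.Client("http://localhost/nilmdb/")
    try:
        client.stream_create("/demo/sensor", "not_a_layout")
    except ClientError as e:       # 4xx -- the request itself was bad
        print("client error:", e)
    except ServerError as e:       # 5xx -- the server failed
        print("server error:", e)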
nilmdb/client/httpclient.py
@@ -1,26 +1,25 @@
 """HTTP client library"""

-import nilmdb.utils
-from nilmdb.client.errors import ClientError, ServerError, Error
+import json
+import urllib.parse

-import simplejson as json
-import urlparse
 import requests

-class HTTPClient(object):
+from nilmdb.client.errors import ClientError, ServerError, Error
+
+
+class HTTPClient():
     """Class to manage and perform HTTP requests from the client"""
-    def __init__(self, baseurl = "", post_json = False):
+    def __init__(self, baseurl="", post_json=False, verify_ssl=True):
         """If baseurl is supplied, all other functions that take
         a URL can be given a relative URL instead."""
         # Verify / clean up URL
-        reparsed = urlparse.urlparse(baseurl).geturl()
+        reparsed = urllib.parse.urlparse(baseurl).geturl()
         if '://' not in reparsed:
-            reparsed = urlparse.urlparse("http://" + baseurl).geturl()
-        self.baseurl = reparsed
+            reparsed = urllib.parse.urlparse("http://" + baseurl).geturl()
+        self.baseurl = reparsed.rstrip('/') + '/'

-        # Build Requests session object, enable SSL verification
-        self.session = requests.Session()
-        self.session.verify = True
+        # Note whether we want SSL verification
+        self.verify_ssl = verify_ssl

         # Saved response, so that tests can verify a few things.
         self._last_response = {}
@@ -33,44 +32,64 @@ class HTTPClient(object):
         # Default variables for exception.  We use the entire body as
         # the default message, in case we can't extract it from a JSON
         # response.
-        args = { "url" : url,
-                 "status" : str(code),
-                 "message" : body,
-                 "traceback" : None }
+        args = {
+            "url": url,
+            "status": str(code),
+            "message": body,
+            "traceback": None
+        }
         try:
             # Fill with server-provided data if we can
             jsonerror = json.loads(body)
             args["status"] = jsonerror["status"]
             args["message"] = jsonerror["message"]
             args["traceback"] = jsonerror["traceback"]
-        except Exception: # pragma: no cover
+        except Exception:
             pass
-        if code >= 400 and code <= 499:
+        if 400 <= code <= 499:
             raise ClientError(**args)
-        else: # pragma: no cover
-            if code >= 500 and code <= 599:
+        else:
+            if 500 <= code <= 599:
                 if args["message"] is None:
-                    args["message"] = ("(no message; try disabling " +
-                                       "response.stream option in " +
+                    args["message"] = ("(no message; try disabling "
+                                       "response.stream option in "
                                        "nilmdb.server for better debugging)")
                 raise ServerError(**args)
             else:
                 raise Error(**args)

     def close(self):
-        self.session.close()
+        pass

     def _do_req(self, method, url, query_data, body_data, stream, headers):
-        url = urlparse.urljoin(self.baseurl, url)
+        url = urllib.parse.urljoin(self.baseurl, url)
         try:
-            response = self.session.request(method, url,
-                                            params = query_data,
-                                            data = body_data,
-                                            stream = stream,
-                                            headers = headers)
+            # Create a new session, ensure we send "Connection: close",
+            # and explicitly close connection after the transfer.
+            # This is to avoid HTTP/1.1 persistent connections
+            # (keepalive), because they have fundamental race
+            # conditions when there are delays between requests:
+            # a new request may be sent at the same instant that the
+            # server decides to timeout the connection.
+            session = requests.Session()
+            if headers is None:
+                headers = {}
+            headers["Connection"] = "close"
+            response = session.request(method, url,
                                       params=query_data,
                                       data=body_data,
                                       stream=stream,
                                       headers=headers,
                                       verify=self.verify_ssl)
+
+            # Close the connection.  If it's a generator (stream =
+            # True), the requests library shouldn't actually close the
+            # HTTP connection until all data has been read from the
+            # response.
+            session.close()
         except requests.RequestException as e:
-            raise ServerError(status = "502 Error", url = url,
-                              message = str(e.message))
+            raise ServerError(status="502 Error", url=url,
+                              message=str(e))
         if response.status_code != 200:
             self._handle_error(url, response.status_code, response.content)
         self._last_response = response
@@ -81,53 +100,90 @@ class HTTPClient(object):
             return (response, False)

     # Normal versions that return data directly
-    def _req(self, method, url, query = None, body = None, headers = None):
+    def _req(self, method, url, query=None, body=None, headers=None):
         """
         Make a request and return the body data as a string or parsed
         JSON object, or raise an error if it contained an error.
         """
         (response, isjson) = self._do_req(method, url, query, body,
-                                          stream = False, headers = headers)
+                                          stream=False, headers=headers)
         if isjson:
             return json.loads(response.content)
         return response.content

-    def get(self, url, params = None):
+    def get(self, url, params=None):
         """Simple GET (parameters in URL)"""
         return self._req("GET", url, params, None)

-    def post(self, url, params = None):
+    def post(self, url, params=None):
         """Simple POST (parameters in body)"""
         if self.post_json:
             return self._req("POST", url, None,
                              json.dumps(params),
-                             { 'Content-type': 'application/json' })
+                             {'Content-type': 'application/json'})
         else:
             return self._req("POST", url, None, params)

-    def put(self, url, data, params = None):
+    def put(self, url, data, params=None,
+            content_type="application/octet-stream"):
         """Simple PUT (parameters in URL, data in body)"""
-        return self._req("PUT", url, params, data)
+        h = {'Content-type': content_type}
+        return self._req("PUT", url, query=params, body=data, headers=h)

     # Generator versions that return data one line at a time.
-    def _req_gen(self, method, url, query = None, body = None, headers = None):
+    def _req_gen(self, method, url, query=None, body=None,
+                 headers=None, binary=False):
         """
         Make a request and return a generator that gives back strings
         or JSON decoded lines of the body data, or raise an error if
         it contained an error.
         """
         (response, isjson) = self._do_req(method, url, query, body,
-                                          stream = True, headers = headers)
-        if isjson:
-            for line in response.iter_lines():
+                                          stream=True, headers=headers)
+
+        # Like the iter_lines function in Requests, but only splits on
+        # the specified line ending.
+        def lines(source, ending):
+            pending = None
+            for chunk in source:
+                if pending is not None:
+                    chunk = pending + chunk
+                tmp = chunk.split(ending)
+                lines = tmp[:-1]
+                if chunk.endswith(ending):
+                    pending = None
+                else:
+                    pending = tmp[-1]
+                for line in lines:
+                    yield line
+            if pending is not None:
+                yield pending
+
+        # Yield the chunks or lines as requested
+        if binary:
+            for chunk in response.iter_content(chunk_size=65536):
+                yield chunk
+        elif isjson:
+            for line in lines(response.iter_content(chunk_size=1),
+                              ending=b'\r\n'):
                 yield json.loads(line)
         else:
-            for line in response.iter_lines():
+            for line in lines(response.iter_content(chunk_size=65536),
+                              ending=b'\n'):
                 yield line

-    def get_gen(self, url, params = None):
+    def get_gen(self, url, params=None, binary=False):
         """Simple GET (parameters in URL) returning a generator"""
-        return self._req_gen("GET", url, params)
+        return self._req_gen("GET", url, params, binary=binary)
+
+    def post_gen(self, url, params=None):
+        """Simple POST (parameters in body) returning a generator"""
+        if self.post_json:
+            return self._req_gen("POST", url, None,
+                                 json.dumps(params),
+                                 {'Content-type': 'application/json'})
+        else:
+            return self._req_gen("POST", url, None, params)

     # Not much use for a POST or PUT generator, since they don't
     # return much data.
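The custom lines() helper above exists because Requests' own iter_lines() splits on any of \r, \n, or \r\n, while these streaming responses are delimited by one specific ending. A standalone rendering of the same logic, with made-up chunk boundaries, shows the behavior:

    def lines(source, ending):
        # Reassemble chunks into full lines, splitting only on 'ending'.
        pending = None
        for chunk in source:
            if pending is not None:
                chunk = pending + chunk
            tmp = chunk.split(ending)
            complete = tmp[:-1]
            pending = None if chunk.endswith(ending) else tmp[-1]
            for line in complete:
                yield line
        if pending is not None:
            yield pending

    # A record split across chunk boundaries comes back whole, and a bare
    # b"\n" inside the stream does not terminate a b"\r\n"-delimited record:
    chunks = [b'{"a": 1}\r', b'\n{"b":', b' 2}\r\n']
    assert list(lines(iter(chunks), b'\r\n')) == [b'{"a": 1}', b'{"b": 2}']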
nilmdb/client/numpyclient.py (new file, 263 lines)
@@ -0,0 +1,263 @@
+# -*- coding: utf-8 -*-
+
+"""Provide a NumpyClient class that is based on normal Client, but has
+additional methods for extracting and inserting data via Numpy arrays."""
+
+import contextlib
+
+import numpy
+
+import nilmdb.utils
+import nilmdb.client.client
+import nilmdb.client.httpclient
+from nilmdb.client.errors import ClientError
+
+
+def layout_to_dtype(layout):
+    ltype = layout.split('_')[0]
+    lcount = int(layout.split('_')[1])
+    if ltype.startswith('int'):
+        atype = '<i' + str(int(ltype[3:]) // 8)
+    elif ltype.startswith('uint'):
+        atype = '<u' + str(int(ltype[4:]) // 8)
+    elif ltype.startswith('float'):
+        atype = '<f' + str(int(ltype[5:]) // 8)
+    else:
+        raise ValueError("bad layout")
+    if lcount == 1:
+        dtype = [('timestamp', '<i8'), ('data', atype)]
+    else:
+        dtype = [('timestamp', '<i8'), ('data', atype, lcount)]
+    return numpy.dtype(dtype)
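As a quick illustrative check against plain numpy (not part of the commit), layout_to_dtype("float32_3") corresponds to the dtype built below:

    import numpy

    dtype = numpy.dtype([('timestamp', '<i8'), ('data', '<f4', 3)])
    # One little-endian int64 timestamp plus three float32 values:
    assert dtype.itemsize == 8 + 3 * 4   # 20 bytes per row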
+class NumpyClient(nilmdb.client.client.Client):
+    """Subclass of nilmdb.client.Client that adds additional methods for
+    extracting and inserting data via Numpy arrays."""
+
+    def _get_dtype(self, path, layout):
+        if layout is None:
+            streams = self.stream_list(path)
+            if len(streams) != 1:
+                raise ClientError("can't get layout for path: " + path)
+            layout = streams[0][1]
+        return layout_to_dtype(layout)
+
+    def stream_extract_numpy(self, path, start=None, end=None,
+                             layout=None, maxrows=100000,
+                             structured=False):
+        """
+        Extract data from a stream.  Returns a generator that yields
+        Numpy arrays of up to 'maxrows' of data each.
+
+        If 'layout' is None, it is read using stream_info.
+
+        If 'structured' is False, all data is converted to float64
+        and returned in a flat 2D array.  Otherwise, data is returned
+        as a structured dtype in a 1D array.
+        """
+        dtype = self._get_dtype(path, layout)
+
+        def to_numpy(data):
+            a = numpy.fromstring(data, dtype)
+            if structured:
+                return a
+            return numpy.c_[a['timestamp'], a['data']]
+
+        chunks = []
+        total_len = 0
+        maxsize = dtype.itemsize * maxrows
+        for data in self.stream_extract(path, start, end, binary=True):
+            # Add this block of binary data
+            chunks.append(data)
+            total_len += len(data)
+
+            # See if we have enough to make the requested Numpy array
+            while total_len >= maxsize:
+                assembled = b"".join(chunks)
+                total_len -= maxsize
+                chunks = [assembled[maxsize:]]
+                block = assembled[:maxsize]
+                yield to_numpy(block)
+
+        if total_len:
+            yield to_numpy(b"".join(chunks))
+
+    @contextlib.contextmanager
+    def stream_insert_numpy_context(self, path, start=None, end=None,
+                                    layout=None):
+        """Return a context manager that allows data to be efficiently
+        inserted into a stream in a piecewise manner.  Data is
+        provided as Numpy arrays, and is aggregated and sent to the
+        server in larger or smaller chunks as necessary.  Data format
+        must match the database layout for the given path.
+
+        For more details, see help for
+        nilmdb.client.numpyclient.StreamInserterNumpy
+
+        If 'layout' is not None, use it as the layout rather than
+        querying the database.
+        """
+        dtype = self._get_dtype(path, layout)
+        ctx = StreamInserterNumpy(self, path, start, end, dtype)
+        yield ctx
+        ctx.finalize()
+        ctx.destroy()
+
+    def stream_insert_numpy(self, path, data, start=None, end=None,
+                            layout=None):
+        """Insert data into a stream.  data should be a Numpy array
+        which will be passed through stream_insert_numpy_context to
+        break it into chunks etc.  See the help for that function
+        for details."""
+        with self.stream_insert_numpy_context(path, start, end, layout) as ctx:
+            if isinstance(data, numpy.ndarray):
+                ctx.insert(data)
+            else:
+                for chunk in data:
+                    ctx.insert(chunk)
+        return ctx.last_response
+
+
+class StreamInserterNumpy(nilmdb.client.client.StreamInserter):
+    """Object returned by stream_insert_numpy_context() that manages
+    the insertion of rows of data into a particular path.
+
+    See help for nilmdb.client.client.StreamInserter for details.
+    The only difference is that, instead of ASCII formatted data,
+    this context manager can take Numpy arrays, which are either
+    structured (1D with complex dtype) or flat (2D with simple dtype).
+    """
+
+    # Soft limit of how many bytes to send per HTTP request.
+    _max_data = 2 * 1024 * 1024
+
+    def __init__(self, client, path, start, end, dtype):
+        """
+        'client' is the client object.  'path' is the database path
+        to insert to.  'start' and 'end' are used for the first
+        contiguous interval and may be None.  'dtype' is the Numpy
+        dtype for this stream.
+        """
+        super(StreamInserterNumpy, self).__init__(client, path, start, end)
+        self._dtype = dtype
+
+        # Max rows to send at once
+        self._max_rows = self._max_data // self._dtype.itemsize
+
+        # List of the current arrays we're building up to send
+        self._block_arrays = []
+        self._block_rows = 0
+
+    def insert(self, array):
+        """Insert Numpy data, which must match the layout type."""
+        if not isinstance(array, numpy.ndarray):
+            array = numpy.array(array)
+        if array.ndim == 1:
+            # Already a structured array; just verify the type
+            if array.dtype != self._dtype:
+                raise ValueError("wrong dtype for 1D (structured) array")
+        elif array.ndim == 2:
+            # Convert to structured array
+            sarray = numpy.zeros(array.shape[0], dtype=self._dtype)
+            try:
+                sarray['timestamp'] = array[:, 0]
+                # Need the squeeze in case sarray['data'] is 1 dimensional
+                sarray['data'] = numpy.squeeze(array[:, 1:])
+            except (IndexError, ValueError):
+                raise ValueError("wrong number of fields for this data type")
+            array = sarray
+        else:
+            raise ValueError("wrong number of dimensions in array")
+
+        length = len(array)
+        maxrows = self._max_rows
+
+        if length == 0:
+            return
+        if length > maxrows:
+            # This is more than twice what we wanted to send, so split
+            # it up.  This is a bit inefficient, but the user really
+            # shouldn't be providing this much data at once.
+            for cut in range(0, length, maxrows):
+                self.insert(array[cut:(cut + maxrows)])
+            return
+
+        # Add this array to our list
+        self._block_arrays.append(array)
+        self._block_rows += length
+
+        # Send if it's too long
+        if self._block_rows >= maxrows:
+            self._send_block(final=False)
+
+    def _send_block(self, final=False):
+        """Send the data currently stored up.  One row might be left
+        over if we need its timestamp saved."""
+
+        # Build the full array to send
+        if self._block_rows == 0:
+            array = numpy.zeros(0, dtype=self._dtype)
+        else:
+            array = numpy.hstack(self._block_arrays)
+
+        # Get starting timestamp
+        start_ts = self._interval_start
+        if start_ts is None:
+            # Pull start from the first row
+            try:
+                start_ts = array['timestamp'][0]
+            except IndexError:
+                pass  # no timestamp is OK, if we have no data
+
+        # Get ending timestamp
+        if final:
+            # For a final block, the timestamp is either the
+            # user-provided end, or the timestamp of the last line
+            # plus epsilon.
+            end_ts = self._interval_end
+            if end_ts is None:
+                try:
+                    end_ts = array['timestamp'][-1]
+                    end_ts += nilmdb.utils.time.epsilon
+                except IndexError:
+                    pass  # no timestamp is OK, if we have no data
+            self._block_arrays = []
+            self._block_rows = 0
+
+            # Next block is completely fresh
+            self._interval_start = None
+            self._interval_end = None
+        else:
+            # An intermediate block.  We need to save the last row
+            # for the next block, and use its timestamp as the ending
+            # timestamp for this one.
+            if len(array) < 2:
+                # Not enough data to send an intermediate block
+                return
+            end_ts = array['timestamp'][-1]
+            if self._interval_end is not None and end_ts > self._interval_end:
+                # User gave us bad endpoints; send it anyway, and let
+                # the server complain so that the error is the same
+                # as if we hadn't done this chunking.
+                end_ts = self._interval_end
+            self._block_arrays = [array[-1:]]
+            self._block_rows = 1
+            array = array[:-1]
+
+            # Next block continues where this one ended
+            self._interval_start = end_ts
+
+        # If we have no endpoints, or equal endpoints, it's OK as long
+        # as there's no data to send
+        if (start_ts is None or end_ts is None) or (start_ts == end_ts):
+            if not array:
+                return
+            raise ClientError("have data to send, but invalid start/end times")
+
+        # Send it
+        data = array.tostring()
+        self.last_response = self._client.stream_insert_block(
+            self._path, data, start_ts, end_ts, binary=True)
+
+        return
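A sketch of round-tripping data through the new NumpyClient (not from the commit; URL, path, and values are made up, and timestamps are int64 microseconds):

    import numpy
    from nilmdb.client.numpyclient import NumpyClient

    client = NumpyClient("http://localhost/nilmdb/")
    client.stream_create("/demo/np", "float32_3")

    # Flat 2D input: first column is the timestamp, then one column per value.
    data = numpy.array([[1000000, 1.0, 2.0, 3.0],
                        [2000000, 1.5, 2.5, 3.5]])
    client.stream_insert_numpy("/demo/np", data)

    for block in client.stream_extract_numpy("/demo/np"):
        print(block.shape)   # arrays of up to 'maxrows' rows each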
nilmdb/cmdline/__init__.py
@@ -1,64 +1,127 @@
 """Command line client functionality"""
 
-import nilmdb.client
-
-from nilmdb.utils.printf import *
-from nilmdb.utils import datetime_tz
-import nilmdb.utils.time
-
-import sys
 import os
+import sys
+import signal
 import argparse
 from argparse import ArgumentDefaultsHelpFormatter as def_form
 
+import nilmdb.client
+from nilmdb.utils.printf import fprintf, sprintf
+import nilmdb.utils.time
+
+import argcomplete
+import datetime_tz
+
 # Valid subcommands.  Defined in separate files just to break
 # things up -- they're still called with Cmdline as self.
-subcommands = [ "help", "info", "create", "list", "metadata",
-                "insert", "extract", "remove", "destroy" ]
+subcommands = ["help", "info", "create", "rename", "list", "intervals",
+               "metadata", "insert", "extract", "remove", "destroy"]
 
 # Import the subcommand modules
 subcmd_mods = {}
 for cmd in subcommands:
-    subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist = [ cmd ])
+    subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist=[cmd])
+
 
 class JimArgumentParser(argparse.ArgumentParser):
+    def parse_args(self, args=None, namespace=None):
+        # Look for --version anywhere and change it to just "nilmtool
+        # --version".  This makes "nilmtool cmd --version" work, which
+        # is needed by help2man.
+        if "--version" in (args or sys.argv[1:]):
+            args = ["--version"]
+        return argparse.ArgumentParser.parse_args(self, args, namespace)
+
     def error(self, message):
         self.print_usage(sys.stderr)
         self.exit(2, sprintf("error: %s\n", message))
 
-class Cmdline(object):
 
-    def __init__(self, argv = None):
+class Complete():
+    # Completion helpers, for using argcomplete (see
+    # extras/nilmtool-bash-completion.sh)
+    def escape(self, s):
+        quote_chars = ["\\", "\"", "'", " "]
+        for char in quote_chars:
+            s = s.replace(char, "\\" + char)
+        return s
+
+    def none(self, prefix, parsed_args, **kwargs):
+        return []
+    rate = none
+    time = none
+    url = none
+
+    def path(self, prefix, parsed_args, **kwargs):
+        client = nilmdb.client.Client(parsed_args.url)
+        return (self.escape(s[0])
+                for s in client.stream_list()
+                if s[0].startswith(prefix))
+
+    def layout(self, prefix, parsed_args, **kwargs):
+        types = ["int8", "int16", "int32", "int64",
+                 "uint8", "uint16", "uint32", "uint64",
+                 "float32", "float64"]
+        layouts = []
+        for i in range(1, 10):
+            layouts.extend([(t + "_" + str(i)) for t in types])
+        return (l for l in layouts if l.startswith(prefix))
+
+    def meta_key(self, prefix, parsed_args, **kwargs):
+        return (kv.split('=')[0] for kv
+                in self.meta_keyval(prefix, parsed_args, **kwargs))
+
+    def meta_keyval(self, prefix, parsed_args, **kwargs):
+        client = nilmdb.client.Client(parsed_args.url)
+        path = parsed_args.path
+        if not path:
+            return []
+        results = []
+        for (k, v) in client.stream_get_metadata(path).items():
+            kv = self.escape(k + '=' + v)
+            if kv.startswith(prefix):
+                results.append(kv)
+        return results
+
+
+class Cmdline():
+
+    def __init__(self, argv=None):
         self.argv = argv or sys.argv[1:]
         self.client = None
-        self.def_url = os.environ.get("NILMDB_URL", "http://localhost:12380")
+        self.def_url = os.environ.get("NILMDB_URL", "http://localhost/nilmdb/")
         self.subcmd = {}
+        self.complete = Complete()
+        self.complete_output_stream = None  # overridden by test suite
 
     def arg_time(self, toparse):
         """Parse a time string argument"""
         try:
-            return nilmdb.utils.time.parse_time(toparse).totimestamp()
+            return nilmdb.utils.time.parse_time(toparse)
         except ValueError as e:
             raise argparse.ArgumentTypeError(sprintf("%s \"%s\"",
                                                      str(e), toparse))
 
+    # Set up the parser
     def parser_setup(self):
-        self.parser = JimArgumentParser(add_help = False,
-                                        formatter_class = def_form)
+        self.parser = JimArgumentParser(add_help=False,
+                                        formatter_class=def_form)
 
         group = self.parser.add_argument_group("General options")
         group.add_argument("-h", "--help", action='help',
                            help='show this help message and exit')
-        group.add_argument("-V", "--version", action="version",
-                           version = nilmdb.__version__)
+        group.add_argument("-v", "--version", action="version",
+                           version=nilmdb.__version__)
 
         group = self.parser.add_argument_group("Server")
         group.add_argument("-u", "--url", action="store",
                            default=self.def_url,
-                           help="NilmDB server URL (default: %(default)s)")
+                           help="NilmDB server URL (default: %(default)s)"
+                           ).completer = self.complete.url
 
         sub = self.parser.add_subparsers(
-            title="Commands", dest="command",
+            title="Commands", dest="command", required=True,
             description="Use 'help command' or 'command --help' for more "
             "details on a particular command.")
 
@@ -73,12 +136,18 @@ class Cmdline(object):
             sys.exit(-1)
 
     def run(self):
+        # Set SIGPIPE to its default handler -- we don't need Python
+        # to catch it for us.
+        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
         # Clear cached timezone, so that we can pick up timezone changes
         # while running this from the test suite.
         datetime_tz._localtz = None
 
         # Run parser
         self.parser_setup()
+        argcomplete.autocomplete(self.parser, exit_method=sys.exit,
+                                 output_stream=self.complete_output_stream)
         self.args = self.parser.parse_args(self.argv)
 
         # Run arg verify handler if there is one
@@ -91,7 +160,7 @@ class Cmdline(object):
         # unless the particular command requests that we don't.
         if "no_test_connect" not in self.args:
             try:
-                server_version = self.client.version()
+                self.client.version()
             except nilmdb.client.Error as e:
                 self.die("error connecting to server: %s", str(e))
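The JimArgumentParser override above is worth isolating: if "--version" appears anywhere on the command line, the whole argument list is rewritten to just ["--version"], so "nilmtool cmd --version" behaves like "nilmtool --version" (which help2man needs). A standalone sketch of the same trick, with illustrative names:

# Standalone sketch of the "--version anywhere" rewrite; the parser
# and program names here are illustrative.
import argparse
import sys

class VersionAnywhereParser(argparse.ArgumentParser):
    def parse_args(self, args=None, namespace=None):
        if "--version" in (args or sys.argv[1:]):
            args = ["--version"]
        return argparse.ArgumentParser.parse_args(self, args, namespace)

parser = VersionAnywhereParser(prog="demo")
parser.add_argument("-v", "--version", action="version", version="2.0")
sub = parser.add_subparsers(dest="command")
sub.add_parser("info")

# Both of these print "2.0" and exit, because the override rewrites
# the full argument list to just ["--version"]:
#   parser.parse_args(["--version"])
#   parser.parse_args(["info", "--version"])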
nilmdb/cmdline/create.py
@@ -1,11 +1,11 @@
-from nilmdb.utils.printf import *
+from argparse import RawDescriptionHelpFormatter as raw_form
+
 import nilmdb.client
 
-from argparse import RawDescriptionHelpFormatter as raw_form
+
 def setup(self, sub):
     cmd = sub.add_parser("create", help="Create a new stream",
-                         formatter_class = raw_form,
+                         formatter_class=raw_form,
                          description="""
 Create a new empty stream at the specified path and with the specified
 layout type.
@@ -19,14 +19,17 @@ Layout types are of the format: type_count
 For example, 'float32_8' means the data for this stream has 8 columns of
 32-bit floating point values.
 """)
-    cmd.set_defaults(handler = cmd_create)
+    cmd.set_defaults(handler=cmd_create)
     group = cmd.add_argument_group("Required arguments")
     group.add_argument("path",
-                       help="Path (in database) of new stream, e.g. /foo/bar")
+                       help="Path (in database) of new stream, e.g. /foo/bar",
+                       ).completer = self.complete.path
     group.add_argument("layout",
-                       help="Layout type for new stream, e.g. float32_8")
+                       help="Layout type for new stream, e.g. float32_8",
+                       ).completer = self.complete.layout
     return cmd
 
+
 def cmd_create(self):
     """Create new stream"""
     try:
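cmd_create is a thin wrapper over the client library's stream_create call, so the command-line invocation "nilmtool create /foo/bar float32_8" is roughly equivalent to the sketch below; the server URL and path are examples.

import nilmdb.client

# Example URL; cmd_create uses the -u/--url argument instead.
client = nilmdb.client.Client("http://localhost/nilmdb/")
client.stream_create("/foo/bar", "float32_8")  # 8 float32 data columns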
nilmdb/cmdline/destroy.py
@@ -1,25 +1,52 @@
-from nilmdb.utils.printf import *
-import nilmdb.client
+import fnmatch
 
 from argparse import ArgumentDefaultsHelpFormatter as def_form
 
+from nilmdb.utils.printf import printf
+import nilmdb.client
+
+
 def setup(self, sub):
     cmd = sub.add_parser("destroy", help="Delete a stream and all data",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
-Destroy the stream at the specified path.  All
-data and metadata related to the stream is
-permanently deleted.
+Destroy the stream at the specified path.
+The stream must be empty.  All metadata
+related to the stream is permanently deleted.
+
+Wildcards and multiple paths are supported.
 """)
-    cmd.set_defaults(handler = cmd_destroy)
+    cmd.set_defaults(handler=cmd_destroy)
+    group = cmd.add_argument_group("Options")
+    group.add_argument("-R", "--remove", action="store_true",
+                       help="Remove all data before destroying stream")
+    group.add_argument("-q", "--quiet", action="store_true",
+                       help="Don't display names when destroying "
+                       "multiple paths")
     group = cmd.add_argument_group("Required arguments")
-    group.add_argument("path",
-                       help="Path of the stream to delete, e.g. /foo/bar")
+    group.add_argument("path", nargs='+',
+                       help="Path of the stream to delete, e.g. /foo/bar/*",
+                       ).completer = self.complete.path
     return cmd
 
+
 def cmd_destroy(self):
     """Destroy stream"""
-    try:
-        self.client.stream_destroy(self.args.path)
-    except nilmdb.client.ClientError as e:
-        self.die("error destroying stream: %s", str(e))
+    streams = [s[0] for s in self.client.stream_list()]
+    paths = []
+    for path in self.args.path:
+        new = fnmatch.filter(streams, path)
+        if not new:
+            self.die("error: no stream matched path: %s", path)
+        paths.extend(new)
+
+    for path in paths:
+        if not self.args.quiet and len(paths) > 1:
+            printf("Destroying %s\n", path)
+
+        try:
+            if self.args.remove:
+                self.client.stream_remove(path)
+            self.client.stream_destroy(path)
+        except nilmdb.client.ClientError as e:
+            self.die("error destroying stream: %s", str(e))
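The wildcard expansion added to cmd_destroy (and reused by cmd_remove below) is plain fnmatch against the server's stream list: every user-supplied pattern must match at least one existing stream. In isolation, with a stand-in stream list:

import fnmatch

streams = ["/test/a", "/test/b", "/prod/x"]  # stand-in for stream_list()

def expand(patterns):
    # Expand each shell-style pattern against known stream paths;
    # a pattern that matches nothing is an error, mirroring cmd_destroy.
    paths = []
    for pattern in patterns:
        new = fnmatch.filter(streams, pattern)
        if not new:
            raise ValueError("no stream matched path: %s" % pattern)
        paths.extend(new)
    return paths

print(expand(["/test/*"]))  # ['/test/a', '/test/b']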
nilmdb/cmdline/extract.py
@@ -1,41 +1,56 @@
-from __future__ import print_function
-from nilmdb.utils.printf import *
+import sys
+
+from nilmdb.utils.printf import printf
 import nilmdb.client
 
+
 def setup(self, sub):
     cmd = sub.add_parser("extract", help="Extract data",
                          description="""
 Extract data from a stream.
 """)
-    cmd.set_defaults(verify = cmd_extract_verify,
-                     handler = cmd_extract)
+    cmd.set_defaults(verify=cmd_extract_verify,
+                     handler=cmd_extract)
 
     group = cmd.add_argument_group("Data selection")
     group.add_argument("path",
-                       help="Path of stream, e.g. /foo/bar")
+                       help="Path of stream, e.g. /foo/bar",
+                       ).completer = self.complete.path
     group.add_argument("-s", "--start", required=True,
                        metavar="TIME", type=self.arg_time,
-                       help="Starting timestamp (free-form, inclusive)")
+                       help="Starting timestamp (free-form, inclusive)",
+                       ).completer = self.complete.time
     group.add_argument("-e", "--end", required=True,
                        metavar="TIME", type=self.arg_time,
-                       help="Ending timestamp (free-form, noninclusive)")
+                       help="Ending timestamp (free-form, noninclusive)",
+                       ).completer = self.complete.time
 
     group = cmd.add_argument_group("Output format")
+    group.add_argument("-B", "--binary", action="store_true",
+                       help="Raw binary output")
     group.add_argument("-b", "--bare", action="store_true",
                        help="Exclude timestamps from output lines")
     group.add_argument("-a", "--annotate", action="store_true",
                        help="Include comments with some information "
                        "about the stream")
+    group.add_argument("-m", "--markup", action="store_true",
+                       help="Include comments with interval starts and ends")
     group.add_argument("-T", "--timestamp-raw", action="store_true",
                        help="Show raw timestamps in annotated information")
     group.add_argument("-c", "--count", action="store_true",
                        help="Just output a count of matched data points")
     return cmd
 
+
 def cmd_extract_verify(self):
-    if self.args.start is not None and self.args.end is not None:
-        if self.args.start > self.args.end:
-            self.parser.error("start is after end")
+    if self.args.start > self.args.end:
+        self.parser.error("start is after end")
+
+    if self.args.binary:
+        if (self.args.bare or self.args.annotate or self.args.markup or
+                self.args.timestamp_raw or self.args.count):
+            self.parser.error("--binary cannot be combined with other options")
+
 
 def cmd_extract(self):
     streams = self.client.stream_list(self.args.path)
@@ -44,9 +59,9 @@ def cmd_extract(self):
     layout = streams[0][1]
 
     if self.args.timestamp_raw:
-        time_string = nilmdb.utils.time.float_time_to_string
+        time_string = nilmdb.utils.time.timestamp_to_string
     else:
-        time_string = nilmdb.utils.time.format_time
+        time_string = nilmdb.utils.time.timestamp_to_human
 
     if self.args.annotate:
         printf("# path: %s\n", self.args.path)
@@ -55,15 +70,23 @@ def cmd_extract(self):
         printf("# end: %s\n", time_string(self.args.end))
 
     printed = False
+    if self.args.binary:
+        printer = sys.stdout.buffer.write
+    else:
+        printer = lambda x: print(x.decode('utf-8'))
+    bare = self.args.bare
+    count = self.args.count
     for dataline in self.client.stream_extract(self.args.path,
                                                self.args.start,
                                                self.args.end,
-                                               self.args.count):
-        if self.args.bare and not self.args.count:
+                                               self.args.count,
+                                               self.args.markup,
+                                               self.args.binary):
+        if bare and not count:
             # Strip timestamp (first element).  Doesn't make sense
             # if we are only returning a count.
-            dataline = ' '.join(dataline.split(' ')[1:])
-        print(dataline)
+            dataline = b' '.join(dataline.split(b' ')[1:])
+        printer(dataline)
         printed = True
     if not printed:
         if self.args.annotate:
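The key Python 3 change in cmd_extract is that stream data now arrives as bytes: binary mode writes the raw bytes straight to sys.stdout.buffer, while text mode decodes each line first. The printer selection reduced to its core:

import sys

def make_printer(binary):
    # Binary mode writes raw bytes to the underlying stdout buffer;
    # text mode decodes each bytes line before printing, as above.
    if binary:
        return sys.stdout.buffer.write
    return lambda x: print(x.decode('utf-8'))

printer = make_printer(binary=False)
printer(b"1234567890 1.5 2.5")  # prints the decoded text line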
nilmdb/cmdline/help.py
@@ -1,7 +1,5 @@
-from nilmdb.utils.printf import *
-
 import argparse
-import sys
+
 
 def setup(self, sub):
     cmd = sub.add_parser("help", help="Show detailed help for a command",
@@ -9,14 +7,15 @@ def setup(self, sub):
 Show help for a command.  'help command' is
 the same as 'command --help'.
 """)
-    cmd.set_defaults(handler = cmd_help)
-    cmd.set_defaults(no_test_connect = True)
+    cmd.set_defaults(handler=cmd_help)
+    cmd.set_defaults(no_test_connect=True)
     cmd.add_argument("command", nargs="?",
                      help="Command to get help about")
     cmd.add_argument("rest", nargs=argparse.REMAINDER,
                      help=argparse.SUPPRESS)
     return cmd
 
+
 def cmd_help(self):
     if self.args.command in self.subcmd:
         self.subcmd[self.args.command].print_help()
nilmdb/cmdline/info.py
@@ -1,19 +1,21 @@
+from argparse import ArgumentDefaultsHelpFormatter as def_form
+
 import nilmdb.client
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import printf
 from nilmdb.utils import human_size
 
-from argparse import ArgumentDefaultsHelpFormatter as def_form
 
 def setup(self, sub):
     cmd = sub.add_parser("info", help="Server information",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
 List information about the server, like
 version.
 """)
-    cmd.set_defaults(handler = cmd_info)
+    cmd.set_defaults(handler=cmd_info)
     return cmd
 
+
 def cmd_info(self):
     """Print info about the server"""
     printf("Client version: %s\n", nilmdb.__version__)
@@ -21,5 +23,8 @@ def cmd_info(self):
     printf("Server URL: %s\n", self.client.geturl())
     dbinfo = self.client.dbinfo()
     printf("Server database path: %s\n", dbinfo["path"])
-    printf("Server database size: %s\n", human_size(dbinfo["size"]))
-    printf("Server database free space: %s\n", human_size(dbinfo["free"]))
+    for (desc, field) in [("used by NilmDB", "size"),
+                          ("used by other", "other"),
+                          ("reserved", "reserved"),
+                          ("free", "free")]:
+        printf("Server disk space %s: %s\n", desc, human_size(dbinfo[field]))
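The expanded dbinfo report now distinguishes four disk-space figures instead of two. With a stand-in dict in place of a live client.dbinfo() result, the loop produces output like this (human_size comes from nilmdb.utils, as imported above):

from nilmdb.utils import human_size

# Stand-in dbinfo dict; a real one comes from client.dbinfo().
dbinfo = {"size": 12345678, "other": 1000, "reserved": 0,
          "free": 987654321}
for (desc, field) in [("used by NilmDB", "size"),
                      ("used by other", "other"),
                      ("reserved", "reserved"),
                      ("free", "free")]:
    print("Server disk space %s: %s" % (desc, human_size(dbinfo[field])))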
nilmdb/cmdline/insert.py
@@ -1,17 +1,18 @@
-from nilmdb.utils.printf import *
+import sys
+
+from nilmdb.utils.printf import printf
 import nilmdb.client
 import nilmdb.utils.timestamper as timestamper
 import nilmdb.utils.time
 
-import sys
 
 def setup(self, sub):
     cmd = sub.add_parser("insert", help="Insert data",
                          description="""
 Insert data into a stream.
 """)
-    cmd.set_defaults(verify = cmd_insert_verify,
-                     handler = cmd_insert)
+    cmd.set_defaults(verify=cmd_insert_verify,
+                     handler=cmd_insert)
     cmd.add_argument("-q", "--quiet", action='store_true',
                      help='suppress unnecessary messages')
 
@@ -25,7 +26,8 @@ def setup(self, sub):
     group.add_argument("-t", "--timestamp", action="store_true",
                        help="Add timestamps to each line")
     group.add_argument("-r", "--rate", type=float,
-                       help="Data rate, in Hz")
+                       help="Data rate, in Hz",
+                       ).completer = self.complete.rate
 
     group = cmd.add_argument_group("Start time",
                                    description="""
@@ -39,7 +41,8 @@ def setup(self, sub):
     exc = group.add_mutually_exclusive_group()
     exc.add_argument("-s", "--start",
                      metavar="TIME", type=self.arg_time,
-                     help="Starting timestamp (free-form)")
+                     help="Starting timestamp (free-form)",
+                     ).completer = self.complete.time
     exc.add_argument("-f", "--filename", action="store_true",
                      help="Use filename to determine start time")
 
@@ -52,26 +55,31 @@ def setup(self, sub):
 timezone.""")
     group.add_argument("-e", "--end",
                        metavar="TIME", type=self.arg_time,
-                       help="Ending timestamp (free-form)")
+                       help="Ending timestamp (free-form)",
+                       ).completer = self.complete.time
 
     group = cmd.add_argument_group("Required parameters")
     group.add_argument("path",
-                       help="Path of stream, e.g. /foo/bar")
-    group.add_argument("file", nargs = '?', default='-',
+                       help="Path of stream, e.g. /foo/bar",
+                       ).completer = self.complete.path
+    group.add_argument("file", nargs='?', default='-',
                        help="File to insert (default: - (stdin))")
     return cmd
 
+
 def cmd_insert_verify(self):
     if self.args.timestamp:
         if not self.args.rate:
             self.die("error: --rate is needed, but was not specified")
         if not self.args.filename and self.args.start is None:
-            self.die("error: need --start or --filename when adding timestamps")
+            self.die("error: need --start or --filename "
+                     "when adding timestamps")
     else:
         if self.args.start is None or self.args.end is None:
             self.die("error: when not adding timestamps, --start and "
                      "--end are required")
 
+
 def cmd_insert(self):
     # Find requested stream
     streams = self.client.stream_list(self.args.path)
@@ -83,7 +91,7 @@ def cmd_insert(self):
         try:
             filename = arg.file
             if filename == '-':
-                infile = sys.stdin
+                infile = sys.stdin.buffer
             else:
                 try:
                     infile = open(filename, "rb")
@@ -92,7 +100,7 @@ def cmd_insert(self):
 
             if arg.start is None:
                 try:
-                    arg.start = nilmdb.utils.time.parse_time(filename).totimestamp()
+                    arg.start = nilmdb.utils.time.parse_time(filename)
                 except ValueError:
                     self.die("error extracting start time from filename '%s'",
                              filename)
@@ -100,16 +108,16 @@ def cmd_insert(self):
             if arg.timestamp:
                 data = timestamper.TimestamperRate(infile, arg.start, arg.rate)
             else:
-                data = iter(lambda: infile.read(1048576), '')
+                data = iter(lambda: infile.read(1048576), b'')
 
             # Print info
             if not arg.quiet:
                 printf("  Input file: %s\n", filename)
                 printf("  Start time: %s\n",
-                       nilmdb.utils.time.format_time(arg.start))
+                       nilmdb.utils.time.timestamp_to_human(arg.start))
                 if arg.end:
                     printf("    End time: %s\n",
-                           nilmdb.utils.time.format_time(arg.end))
+                           nilmdb.utils.time.timestamp_to_human(arg.end))
                 if arg.timestamp:
                     printf("Timestamper: %s\n", str(data))
 
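The non-timestamped input path above relies on the two-argument iter() idiom: calling the lambda until it returns the sentinel b'' (EOF in Python 3 binary mode), which yields the file as 1 MiB bytes chunks. Demonstrated with an in-memory file standing in for sys.stdin.buffer:

import io

# A 3 MB in-memory stand-in for sys.stdin.buffer or an open "rb" file.
infile = io.BytesIO(b"x" * 3000000)
for chunk in iter(lambda: infile.read(1048576), b''):
    print(len(chunk))  # 1048576, 1048576, 902848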
nilmdb/cmdline/intervals.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+from argparse import ArgumentDefaultsHelpFormatter as def_form
+
+from nilmdb.utils.printf import printf
+import nilmdb.utils.time
+from nilmdb.utils.interval import Interval
+
+
+def setup(self, sub):
+    cmd = sub.add_parser("intervals", help="List intervals",
+                         formatter_class=def_form,
+                         description="""
+List intervals in a stream, similar to
+'list --detail path'.
+
+If '--diff diffpath' is provided, only
+interval ranges that are present in 'path'
+and not present in 'diffpath' are printed.
+""")
+    cmd.set_defaults(verify=cmd_intervals_verify,
+                     handler=cmd_intervals)
+
+    group = cmd.add_argument_group("Stream selection")
+    group.add_argument("path", metavar="PATH",
+                       help="List intervals for this path",
+                       ).completer = self.complete.path
+    group.add_argument("-d", "--diff", metavar="PATH",
+                       help="Subtract intervals from this path",
+                       ).completer = self.complete.path
+
+    group = cmd.add_argument_group("Interval details")
+    group.add_argument("-s", "--start",
+                       metavar="TIME", type=self.arg_time,
+                       help="Starting timestamp for intervals "
+                       "(free-form, inclusive)",
+                       ).completer = self.complete.time
+    group.add_argument("-e", "--end",
+                       metavar="TIME", type=self.arg_time,
+                       help="Ending timestamp for intervals "
+                       "(free-form, noninclusive)",
+                       ).completer = self.complete.time
+
+    group = cmd.add_argument_group("Misc options")
+    group.add_argument("-T", "--timestamp-raw", action="store_true",
+                       help="Show raw timestamps when printing times")
+    group.add_argument("-o", "--optimize", action="store_true",
+                       help="Optimize (merge adjacent) intervals")
+
+    return cmd
+
+
+def cmd_intervals_verify(self):
+    if self.args.start is not None and self.args.end is not None:
+        if self.args.start >= self.args.end:
+            self.parser.error("start must precede end")
+
+
+def cmd_intervals(self):
+    """List intervals in a stream"""
+    if self.args.timestamp_raw:
+        time_string = nilmdb.utils.time.timestamp_to_string
+    else:
+        time_string = nilmdb.utils.time.timestamp_to_human
+
+    try:
+        intervals = (Interval(start, end) for (start, end) in
+                     self.client.stream_intervals(self.args.path,
+                                                  self.args.start,
+                                                  self.args.end,
+                                                  self.args.diff))
+        if self.args.optimize:
+            intervals = nilmdb.utils.interval.optimize(intervals)
+        for i in intervals:
+            printf("[ %s -> %s ]\n", time_string(i.start), time_string(i.end))
+
+    except nilmdb.client.ClientError as e:
+        self.die("error listing intervals: %s", str(e))
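Conceptually, the new "--optimize" flag merges intervals that touch or overlap. nilmdb.utils.interval.optimize does this lazily over Interval objects; the sketch below shows the same idea eagerly for plain (start, end) tuples, assuming the input is sorted by start time, as stream_intervals() output is.

def merge_adjacent(intervals):
    # Merge touching/overlapping (start, end) tuples; assumes the
    # input is sorted by start time.
    merged = []
    for (start, end) in intervals:
        if merged and start <= merged[-1][1]:
            merged[-1] = (merged[-1][0], max(end, merged[-1][1]))
        else:
            merged.append((start, end))
    return merged

print(merge_adjacent([(0, 5), (5, 9), (12, 20)]))  # [(0, 9), (12, 20)]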
nilmdb/cmdline/list.py
@@ -1,28 +1,25 @@
-from nilmdb.utils.printf import *
-import nilmdb.utils.time
+import fnmatch
+from argparse import ArgumentDefaultsHelpFormatter as def_form
 
-import fnmatch
-import argparse
-from argparse import ArgumentDefaultsHelpFormatter as def_form
+from nilmdb.utils.printf import printf
+import nilmdb.utils.time
+
 
 def setup(self, sub):
     cmd = sub.add_parser("list", help="List streams",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
 List streams available in the database,
-optionally filtering by layout or path.  Wildcards
-are accepted.
+optionally filtering by path.  Wildcards
+are accepted; non-matching paths or wildcards
+are ignored.
 """)
-    cmd.set_defaults(verify = cmd_list_verify,
-                     handler = cmd_list)
+    cmd.set_defaults(verify=cmd_list_verify,
+                     handler=cmd_list)
 
     group = cmd.add_argument_group("Stream filtering")
-    group.add_argument("-p", "--path", metavar="PATH", default="*",
-                       help="Match only this path (-p can be omitted)")
-    group.add_argument("path_positional", default="*",
-                       nargs="?", help=argparse.SUPPRESS)
-    group.add_argument("-l", "--layout", default="*",
-                       help="Match only this stream layout")
+    group.add_argument("path", metavar="PATH", default=["*"], nargs='*',
+                       ).completer = self.complete.path
 
     group = cmd.add_argument_group("Interval info")
     group.add_argument("-E", "--ext", action="store_true",
@@ -35,68 +32,74 @@ def setup(self, sub):
     group.add_argument("-s", "--start",
                        metavar="TIME", type=self.arg_time,
                        help="Starting timestamp for intervals "
-                       "(free-form, inclusive)")
+                       "(free-form, inclusive)",
+                       ).completer = self.complete.time
     group.add_argument("-e", "--end",
                        metavar="TIME", type=self.arg_time,
                        help="Ending timestamp for intervals "
-                       "(free-form, noninclusive)")
+                       "(free-form, noninclusive)",
+                       ).completer = self.complete.time
 
     group = cmd.add_argument_group("Misc options")
     group.add_argument("-T", "--timestamp-raw", action="store_true",
                        help="Show raw timestamps when printing times")
+    group.add_argument("-l", "--layout", action="store_true",
+                       help="Show layout type next to path name")
+    group.add_argument("-n", "--no-decim", action="store_true",
+                       help="Skip paths containing \"~decim-\"")
+
     return cmd
 
+
 def cmd_list_verify(self):
-    # A hidden "path_positional" argument lets the user leave off the
-    # "-p" when specifying the path.  Handle it here.
-    got_opt = self.args.path != "*"
-    got_pos = self.args.path_positional != "*"
-    if got_pos:
-        if got_opt:
-            self.parser.error("too many paths specified")
-        else:
-            self.args.path = self.args.path_positional
-
     if self.args.start is not None and self.args.end is not None:
         if self.args.start >= self.args.end:
             self.parser.error("start must precede end")
 
     if self.args.start is not None or self.args.end is not None:
         if not self.args.detail:
-            self.parser.error("--start and --end only make sense with --detail")
+            self.parser.error("--start and --end only make sense "
+                              "with --detail")
+
 
 def cmd_list(self):
     """List available streams"""
-    streams = self.client.stream_list(extended = True)
+    streams = self.client.stream_list(extended=True)
 
     if self.args.timestamp_raw:
-        time_string = nilmdb.utils.time.float_time_to_string
+        time_string = nilmdb.utils.time.timestamp_to_string
     else:
-        time_string = nilmdb.utils.time.format_time
+        time_string = nilmdb.utils.time.timestamp_to_human
 
-    for stream in streams:
-        (path, layout, int_min, int_max, rows, seconds) = stream[:6]
-        if not (fnmatch.fnmatch(path, self.args.path) and
-                fnmatch.fnmatch(layout, self.args.layout)):
-            continue
+    for argpath in self.args.path:
+        for stream in streams:
+            (path, layout, int_min, int_max, rows, time) = stream[:6]
+            if not fnmatch.fnmatch(path, argpath):
+                continue
+            if self.args.no_decim and "~decim-" in path:
+                continue
 
-        printf("%s %s\n", path, layout)
-        if self.args.ext:
-            if int_min is None or int_max is None:
-                printf("  interval extents: (no data)\n")
-            else:
-                printf("  interval extents: %s -> %s\n",
-                       time_string(int_min), time_string(int_max))
-            printf("  total data: %d rows, %.6f seconds\n",
-                   rows or 0, seconds or 0);
+            if self.args.layout:
+                printf("%s %s\n", path, layout)
+            else:
+                printf("%s\n", path)
 
-        if self.args.detail:
-            printed = False
-            for (start, end) in self.client.stream_intervals(
-                    path, self.args.start, self.args.end):
-                printf("  [ %s -> %s ]\n", time_string(start), time_string(end))
-                printed = True
-            if not printed:
-                printf("  (no intervals)\n")
+            if self.args.ext:
+                if int_min is None or int_max is None:
+                    printf("  interval extents: (no data)\n")
+                else:
+                    printf("  interval extents: %s -> %s\n",
+                           time_string(int_min), time_string(int_max))
+                printf("  total data: %d rows, %.6f seconds\n",
+                       rows or 0,
+                       nilmdb.utils.time.timestamp_to_seconds(time or 0))
 
+            if self.args.detail:
+                printed = False
+                for (start, end) in self.client.stream_intervals(
+                        path, self.args.start, self.args.end):
+                    printf("  [ %s -> %s ]\n",
+                           time_string(start), time_string(end))
+                    printed = True
+                if not printed:
+                    printf("  (no intervals)\n")
nilmdb/cmdline/metadata.py
@@ -1,7 +1,8 @@
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import printf
 import nilmdb
 import nilmdb.client
 
+
 def setup(self, sub):
     cmd = sub.add_parser("metadata", help="Get or set stream metadata",
                          description="""
@@ -9,25 +10,34 @@ def setup(self, sub):
 a stream.
 """,
                          usage="%(prog)s path [-g [key ...] | "
-                         "-s key=value [...] | -u key=value [...]]")
-    cmd.set_defaults(handler = cmd_metadata)
+                         "-s key=value [...] | -u key=value [...]] | "
+                         "-d [key ...]")
+    cmd.set_defaults(handler=cmd_metadata)
 
     group = cmd.add_argument_group("Required arguments")
     group.add_argument("path",
-                       help="Path of stream, e.g. /foo/bar")
+                       help="Path of stream, e.g. /foo/bar",
+                       ).completer = self.complete.path
 
     group = cmd.add_argument_group("Actions")
     exc = group.add_mutually_exclusive_group()
     exc.add_argument("-g", "--get", nargs="*", metavar="key",
-                     help="Get metadata for specified keys (default all)")
+                     help="Get metadata for specified keys (default all)",
+                     ).completer = self.complete.meta_key
     exc.add_argument("-s", "--set", nargs="+", metavar="key=value",
                      help="Replace all metadata with provided "
-                     "key=value pairs")
+                     "key=value pairs",
+                     ).completer = self.complete.meta_keyval
     exc.add_argument("-u", "--update", nargs="+", metavar="key=value",
                      help="Update metadata using provided "
-                     "key=value pairs")
+                     "key=value pairs",
+                     ).completer = self.complete.meta_keyval
+    exc.add_argument("-d", "--delete", nargs="*", metavar="key",
+                     help="Delete metadata for specified keys (default all)",
+                     ).completer = self.complete.meta_key
     return cmd
 
+
 def cmd_metadata(self):
     """Manipulate metadata"""
     if self.args.set is not None or self.args.update is not None:
@@ -52,15 +62,29 @@ def cmd_metadata(self):
             handler(self.args.path, data)
         except nilmdb.client.ClientError as e:
             self.die("error setting/updating metadata: %s", str(e))
+    elif self.args.delete is not None:
+        # Delete (by setting values to empty strings)
+        keys = None
+        if self.args.delete:
+            keys = list(self.args.delete)
+        try:
+            data = self.client.stream_get_metadata(self.args.path, keys)
+            for key in data:
+                data[key] = ""
+            self.client.stream_update_metadata(self.args.path, data)
+        except nilmdb.client.ClientError as e:
+            self.die("error deleting metadata: %s", str(e))
     else:
         # Get (or unspecified)
-        keys = self.args.get or None
+        keys = None
+        if self.args.get:
+            keys = list(self.args.get)
        try:
             data = self.client.stream_get_metadata(self.args.path, keys)
         except nilmdb.client.ClientError as e:
             self.die("error getting metadata: %s", str(e))
         for key, value in sorted(data.items()):
-            # Omit nonexistant keys
+            # Print nonexistant keys as having empty value
             if value is None:
                 value = ""
             printf("%s=%s\n", key, value)
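Note the convention the new "-d/--delete" action relies on: NilmDB treats updating a metadata key to the empty string as deleting it, so the command is implemented as a get followed by an update. The equivalent flow against the client library directly, with an example URL, path, and key:

import nilmdb.client

client = nilmdb.client.Client("http://localhost/nilmdb/")  # example URL
data = client.stream_get_metadata("/foo/bar", ["obsolete_key"])
for key in data:
    data[key] = ""   # empty value deletes the key server-side
client.stream_update_metadata("/foo/bar", data)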
nilmdb/cmdline/remove.py
@@ -1,37 +1,59 @@
-from nilmdb.utils.printf import *
+import fnmatch
+
+from nilmdb.utils.printf import printf
 import nilmdb.client
 
+
 def setup(self, sub):
     cmd = sub.add_parser("remove", help="Remove data",
                          description="""
 Remove all data from a specified time range within a
-stream.
+stream.  If multiple streams or wildcards are
+provided, the same time range is removed from all
+streams.
 """)
-    cmd.set_defaults(handler = cmd_remove)
+    cmd.set_defaults(handler=cmd_remove)
 
     group = cmd.add_argument_group("Data selection")
-    group.add_argument("path",
-                       help="Path of stream, e.g. /foo/bar")
+    group.add_argument("path", nargs='+',
+                       help="Path of stream, e.g. /foo/bar/*",
+                       ).completer = self.complete.path
     group.add_argument("-s", "--start", required=True,
                        metavar="TIME", type=self.arg_time,
-                       help="Starting timestamp (free-form, inclusive)")
+                       help="Starting timestamp (free-form, inclusive)",
+                       ).completer = self.complete.time
     group.add_argument("-e", "--end", required=True,
                        metavar="TIME", type=self.arg_time,
-                       help="Ending timestamp (free-form, noninclusive)")
+                       help="Ending timestamp (free-form, noninclusive)",
+                       ).completer = self.complete.time
 
     group = cmd.add_argument_group("Output format")
+    group.add_argument("-q", "--quiet", action="store_true",
+                       help="Don't display names when removing "
+                       "from multiple paths")
     group.add_argument("-c", "--count", action="store_true",
                        help="Output number of data points removed")
     return cmd
 
+
 def cmd_remove(self):
+    streams = [s[0] for s in self.client.stream_list()]
+    paths = []
+    for path in self.args.path:
+        new = fnmatch.filter(streams, path)
+        if not new:
+            self.die("error: no stream matched path: %s", path)
+        paths.extend(new)
+
     try:
-        count = self.client.stream_remove(self.args.path,
-                                          self.args.start, self.args.end)
+        for path in paths:
+            if not self.args.quiet and len(paths) > 1:
+                printf("Removing from %s\n", path)
+            count = self.client.stream_remove(path,
+                                              self.args.start, self.args.end)
+            if self.args.count:
+                printf("%d\n", count)
     except nilmdb.client.ClientError as e:
         self.die("error removing data: %s", str(e))
 
-    if self.args.count:
-        printf("%d\n", count)
-
     return 0
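With wildcards, cmd_remove now loops over each matched stream and, under --count, prints one removed-row count per path rather than a single total. The equivalent client-library calls, with illustrative URL, paths, and time range:

import nilmdb.client

client = nilmdb.client.Client("http://localhost/nilmdb/")  # example URL
for path in ["/test/a", "/test/b"]:   # as expanded from wildcards
    count = client.stream_remove(path, 0, 1000000)
    print(count)                      # rows removed from this stream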
nilmdb/cmdline/rename.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+from argparse import ArgumentDefaultsHelpFormatter as def_form
+
+import nilmdb.client
+
+
+def setup(self, sub):
+    cmd = sub.add_parser("rename", help="Rename a stream",
+                         formatter_class=def_form,
+                         description="""
+Rename a stream.
+
+Only the stream's path is renamed; no
+metadata is changed.
+""")
+    cmd.set_defaults(handler=cmd_rename)
+    group = cmd.add_argument_group("Required arguments")
+    group.add_argument("oldpath",
+                       help="Old path, e.g. /foo/old",
+                       ).completer = self.complete.path
+    group.add_argument("newpath",
+                       help="New path, e.g. /foo/bar/new",
+                       ).completer = self.complete.path
+
+    return cmd
+
+
+def cmd_rename(self):
+    """Rename a stream"""
+    try:
+        self.client.stream_rename(self.args.oldpath, self.args.newpath)
+    except nilmdb.client.ClientError as e:
+        self.die("error renaming stream: %s", str(e))
nilmdb/fsck/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+"""nilmdb.fsck"""
+
+from nilmdb.fsck.fsck import Fsck
nilmdb/fsck/fsck.py (new file, 466 lines; listing truncated below)
@@ -0,0 +1,466 @@
+# -*- coding: utf-8 -*-
+
+raise Exception("todo: fix path bytes issues")
+
+"""Check database consistency, with some ability to fix problems.
+This should be able to fix cases where a database gets corrupted due
+to unexpected system shutdown, and detect other cases that may cause
+NilmDB to return errors when trying to manipulate the database."""
+
+import nilmdb.utils
+import nilmdb.server
+import nilmdb.client.numpyclient
+from nilmdb.utils.interval import IntervalError
+from nilmdb.server.interval import Interval, IntervalSet
+from nilmdb.utils.printf import printf, fprintf, sprintf
+from nilmdb.utils.time import timestamp_to_string
+
+from collections import defaultdict
+import sqlite3
+import os
+import sys
+import progressbar
+import re
+import time
+import shutil
+import pickle
+import numpy
+
+
+class FsckError(Exception):
+    def __init__(self, msg = "", *args):
+        if args:
+            msg = sprintf(msg, *args)
+        Exception.__init__(self, msg)
+
+
+class FixableFsckError(FsckError):
+    def __init__(self, msg = "", *args):
+        if args:
+            msg = sprintf(msg, *args)
+        FsckError.__init__(self, "%s\nThis may be fixable with \"--fix\".", msg)
+
+
+class RetryFsck(FsckError):
+    pass
+
+
+def log(format, *args):
+    printf(format, *args)
+
+
+def err(format, *args):
+    fprintf(sys.stderr, format, *args)
+
+
+# Decorator that retries a function if it raises a specific exception
+def retry_if_raised(exc, message = None, max_retries = 100):
+    def f1(func):
+        def f2(*args, **kwargs):
+            for n in range(max_retries):
+                try:
+                    return func(*args, **kwargs)
+                except exc:
+                    if message:
+                        log("%s\n\n", message)
+            raise Exception("Max number of retries (%d) exceeded; giving up"
+                            % max_retries)
+        return f2
+    return f1
+
+
+class Progress(object):
+    def __init__(self, maxval):
+        if maxval == 0:
+            maxval = 1
+        self.bar = progressbar.ProgressBar(
+            maxval = maxval,
+            widgets = [ progressbar.Percentage(), ' ',
+                        progressbar.Bar(), ' ',
+                        progressbar.ETA() ])
+        if self.bar.term_width == 0:
+            self.bar.term_width = 75
+
+    def __enter__(self):
+        self.bar.start()
+        self.last_update = 0
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        if exc_type is None:
+            self.bar.finish()
+        else:
+            printf("\n")
+
+    def update(self, val):
+        self.bar.update(val)
+
+
+class Fsck(object):
+
+    def __init__(self, path, fix = False):
+        self.basepath = path
+        self.sqlpath = os.path.join(path, "data.sql")
+        self.bulkpath = os.path.join(path, "data")
+        self.bulklock = os.path.join(path, "data.lock")
+        self.fix = fix
+
+    ### Main checks
+
+    @retry_if_raised(RetryFsck, "Something was fixed: restarting fsck")
+    def check(self, skip_data = False):
+        self.bulk = None
+        self.sql = None
+        try:
+            self.check_paths()
+            self.check_sql()
+            self.check_streams()
+            self.check_intervals()
+            if skip_data:
+                log("skipped data check\n")
+            else:
+                self.check_data()
+        finally:
+            if self.bulk:
+                self.bulk.close()
+            if self.sql:
+                self.sql.commit()
+                self.sql.close()
+        log("ok\n")
+
+    ### Check basic path structure
+
+    def check_paths(self):
+        log("checking paths\n")
+        if self.bulk:
+            self.bulk.close()
+        if not os.path.isfile(self.sqlpath):
+            raise FsckError("SQL database missing (%s)", self.sqlpath)
+        if not os.path.isdir(self.bulkpath):
+            raise FsckError("Bulk data directory missing (%s)", self.bulkpath)
+        with open(self.bulklock, "w") as lockfile:
+            if not nilmdb.utils.lock.exclusive_lock(lockfile):
+                raise FsckError('Database already locked by another process\n'
+                                'Make sure all other processes that might be '
+                                'using the database are stopped.\n'
+                                'Restarting apache will cause it to unlock '
+                                'the db until a request is received.')
+            # unlocked immediately
+        self.bulk = nilmdb.server.bulkdata.BulkData(self.basepath)
+
+    ### Check SQL database health
+
+    def check_sql(self):
+        log("checking sqlite database\n")
+
+        self.sql = sqlite3.connect(self.sqlpath)
+        with self.sql:
+            cur = self.sql.cursor()
+            ver = cur.execute("PRAGMA user_version").fetchone()[0]
+            good = max(nilmdb.server.nilmdb._sql_schema_updates.keys())
+            if ver != good:
+                raise FsckError("database version %d too old, should be %d",
+                                ver, good)
+            self.stream_path = {}
+            self.stream_layout = {}
+            log("  loading paths\n")
+            result = cur.execute("SELECT id, path, layout FROM streams")
+            for r in result:
+                if r[0] in self.stream_path:
+                    raise FsckError("duplicated ID %d in stream IDs", r[0])
+                self.stream_path[r[0]] = r[1]
+                self.stream_layout[r[0]] = r[2]
+
+            log("  loading intervals\n")
+            self.stream_interval = defaultdict(list)
+            result = cur.execute("SELECT stream_id, start_time, end_time, "
+                                 "start_pos, end_pos FROM ranges "
+                                 "ORDER BY start_time")
+            for r in result:
+                if r[0] not in self.stream_path:
+                    raise FsckError("interval ID %d not in streams", r[0])
+                self.stream_interval[r[0]].append((r[1], r[2], r[3], r[4]))
+
+            log("  loading metadata\n")
+            self.stream_meta = defaultdict(dict)
+            result = cur.execute("SELECT stream_id, key, value FROM metadata")
+            for r in result:
+                if r[0] not in self.stream_path:
+                    raise FsckError("metadata ID %d not in streams", r[0])
+                if r[1] in self.stream_meta[r[0]]:
+                    raise FsckError("duplicate metadata key '%s' for stream %d",
+                                    r[1], r[0])
+                self.stream_meta[r[0]][r[1]] = r[2]
+
+    ### Check streams and basic interval overlap
+
+    def check_streams(self):
+        ids = list(self.stream_path.keys())
+        log("checking %s streams\n", "{:,d}".format(len(ids)))
+        with Progress(len(ids)) as pbar:
+            for i, sid in enumerate(ids):
+                pbar.update(i)
+                path = self.stream_path[sid]
+
+                # unique path, valid layout
+                if list(self.stream_path.values()).count(path) != 1:
+                    raise FsckError("duplicated path %s", path)
+                layout = self.stream_layout[sid].split('_')[0]
+                if layout not in ('int8', 'int16', 'int32', 'int64',
+                                  'uint8', 'uint16', 'uint32', 'uint64',
+                                  'float32', 'float64'):
+                    raise FsckError("bad layout %s for %s", layout, path)
+                count = int(self.stream_layout[sid].split('_')[1])
+                if count < 1 or count > 1024:
+                    raise FsckError("bad count %d for %s", count, path)
+
+                # must exist in bulkdata
+                bulk = self.bulkpath + path
+                if not os.path.isdir(bulk):
+                    raise FsckError("%s: missing bulkdata dir", path)
+                if not nilmdb.server.bulkdata.Table.exists(bulk):
+                    raise FsckError("%s: bad bulkdata table", path)
+
+                # intervals don't overlap.  Abuse IntervalSet to check
+                # for intervals in file positions, too.
+                timeiset = IntervalSet()
+                posiset = IntervalSet()
+                for (stime, etime, spos, epos) in self.stream_interval[sid]:
+                    new = Interval(stime, etime)
+                    try:
+                        timeiset += new
+                    except IntervalError:
+                        raise FsckError("%s: overlap in intervals:\n"
+                                        "set: %s\nnew: %s",
+                                        path, str(timeiset), str(new))
+                    if spos != epos:
+                        new = Interval(spos, epos)
+                        try:
+                            posiset += new
+                        except IntervalError:
+                            raise FsckError("%s: overlap in file offsets:\n"
+                                            "set: %s\nnew: %s",
+                                            path, str(posiset), str(new))
+
+                # check bulkdata
+                self.check_bulkdata(sid, path, bulk)
+
+                # Check that we can open bulkdata
+                try:
+                    tab = None
+                    try:
+                        tab = nilmdb.server.bulkdata.Table(bulk)
+                    except Exception as e:
+                        raise FsckError("%s: can't open bulkdata: %s",
+                                        path, str(e))
+                finally:
+                    if tab:
+                        tab.close()
+
+    ### Check that bulkdata is good enough to be opened
+
+    @retry_if_raised(RetryFsck)
+    def check_bulkdata(self, sid, path, bulk):
+        with open(os.path.join(bulk, "_format"), "rb") as f:
+            fmt = pickle.load(f)
+        if fmt["version"] != 3:
+            raise FsckError("%s: bad or unsupported bulkdata version %d",
+                            path, fmt["version"])
+        row_per_file = int(fmt["rows_per_file"])
+        files_per_dir = int(fmt["files_per_dir"])
+        layout = fmt["layout"]
+        if layout != self.stream_layout[sid]:
+            raise FsckError("%s: layout mismatch %s != %s", path,
+                            layout, self.stream_layout[sid])
+
+        # Every file should have a size that's the multiple of the row size
+        rkt = nilmdb.server.rocket.Rocket(layout, None)
+        row_size = rkt.binary_size
+        rkt.close()
+
+        # Find all directories
+        regex = re.compile("^[0-9a-f]{4,}$")
+        subdirs = sorted(filter(regex.search, os.listdir(bulk)),
+                         key = lambda x: int(x, 16), reverse = True)
+        for subdir in subdirs:
+            # Find all files in that dir
+            subpath = os.path.join(bulk, subdir)
+            files = list(filter(regex.search, os.listdir(subpath)))
+            if not files:
+                self.fix_empty_subdir(subpath)
+                raise RetryFsck
+            # Verify that their size is a multiple of the row size
+            for filename in files:
+                filepath = os.path.join(subpath, filename)
+                offset = os.path.getsize(filepath)
+                if offset % row_size:
+                    self.fix_bad_filesize(path, filepath, offset, row_size)
+
+    def fix_empty_subdir(self, subpath):
+        msg = sprintf("bulkdata path %s is missing data files", subpath)
+        if not self.fix:
+            raise FixableFsckError(msg)
+        # Try to fix it by just deleting whatever is present,
+        # as long as it's only ".removed" files.
+        err("\n%s\n", msg)
+        for fn in os.listdir(subpath):
+            if not fn.endswith(".removed"):
+                raise FsckError("can't fix automatically: please manually "
+                                "remove the file %s and try again",
+                                os.path.join(subpath, fn))
+        # Remove the whole thing
+        err("Removing empty subpath\n")
+        shutil.rmtree(subpath)
+        raise RetryFsck
+
+    def fix_bad_filesize(self, path, filepath, offset, row_size):
+        extra = offset % row_size
+        msg = sprintf("%s: size of file %s (%d) is not a multiple" +
+                      " of row size (%d): %d extra bytes present",
+                      path, filepath, offset, row_size, extra)
+        if not self.fix:
+            raise FixableFsckError(msg)
+        # Try to fix it by just truncating the file
+        err("\n%s\n", msg)
+        newsize = offset - extra
+        err("Truncating file to %d bytes and retrying\n", newsize)
+        with open(filepath, "r+b") as f:
+            f.truncate(newsize)
+        raise RetryFsck
+
+    ### Check interval endpoints
+
+    def check_intervals(self):
+        total_ints = sum(len(x) for x in list(self.stream_interval.values()))
+        log("checking %s intervals\n", "{:,d}".format(total_ints))
+        done = 0
+        with Progress(total_ints) as pbar:
+            for sid in self.stream_interval:
+                try:
+                    bulk = self.bulkpath + self.stream_path[sid]
+                    tab = nilmdb.server.bulkdata.Table(bulk)
+                    def update(x):
+                        pbar.update(done + x)
+                    ints = self.stream_interval[sid]
+                    done += self.check_table_intervals(sid, ints, tab, update)
+                finally:
+                    tab.close()
+
+    def check_table_intervals(self, sid, ints, tab, update):
+        # look in the table to make sure we can pick out the interval's
+        # endpoints
+        path = self.stream_path[sid]
+        tab.file_open.cache_remove_all()
+        for (i, intv) in enumerate(ints):
+            update(i)
+            (stime, etime, spos, epos) = intv
+            if spos == epos and spos >= 0 and spos <= tab.nrows:
+                continue
+            try:
+                srow = tab[spos]
+                erow = tab[epos-1]
+            except Exception as e:
+                self.fix_bad_interval(sid, intv, tab, str(e))
+                raise RetryFsck
+        return len(ints)
+
+    def fix_bad_interval(self, sid, intv, tab, msg):
+        path = self.stream_path[sid]
+        msg = sprintf("%s: interval %s error accessing rows: %s",
|
||||||
|
path, str(intv), str(msg))
|
||||||
|
if not self.fix:
|
||||||
|
raise FixableFsckError(msg)
|
||||||
|
err("\n%s\n", msg)
|
||||||
|
|
||||||
|
(stime, etime, spos, epos) = intv
|
||||||
|
# If it's just that the end pos is more than the number of rows
|
||||||
|
# in the table, lower end pos and truncate interval time too.
|
||||||
|
if spos < tab.nrows and epos >= tab.nrows:
|
||||||
|
err("end position is past endrows, but it can be truncated\n")
|
||||||
|
err("old end: time %d, pos %d\n", etime, epos)
|
||||||
|
new_epos = tab.nrows
|
||||||
|
new_etime = tab[new_epos-1] + 1
|
||||||
|
err("new end: time %d, pos %d\n", new_etime, new_epos)
|
||||||
|
if stime < new_etime:
|
||||||
|
# Change it in SQL
|
||||||
|
with self.sql:
|
||||||
|
cur = self.sql.cursor()
|
||||||
|
cur.execute("UPDATE ranges SET end_time=?, end_pos=? "
|
||||||
|
"WHERE stream_id=? AND start_time=? AND "
|
||||||
|
"end_time=? AND start_pos=? AND end_pos=?",
|
||||||
|
(new_etime, new_epos, sid, stime, etime,
|
||||||
|
spos, epos))
|
||||||
|
if cur.rowcount != 1:
|
||||||
|
raise FsckError("failed to fix SQL database")
|
||||||
|
raise RetryFsck
|
||||||
|
err("actually it can't be truncated; times are bad too")
|
||||||
|
|
||||||
|
# Otherwise, the only hope is to delete the interval entirely.
|
||||||
|
err("*** Deleting the entire interval from SQL.\n")
|
||||||
|
err("This may leave stale data on disk. To fix that, copy all\n")
|
||||||
|
err("data from this stream to a new stream, then remove all data\n")
|
||||||
|
err("from and destroy %s.\n", path)
|
||||||
|
with self.sql:
|
||||||
|
cur = self.sql.cursor()
|
||||||
|
cur.execute("DELETE FROM ranges WHERE "
|
||||||
|
"stream_id=? AND start_time=? AND "
|
||||||
|
"end_time=? AND start_pos=? AND end_pos=?",
|
||||||
|
(sid, stime, etime, spos, epos))
|
||||||
|
if cur.rowcount != 1:
|
||||||
|
raise FsckError("failed to remove interval")
|
||||||
|
raise RetryFsck
|
||||||
|
|
||||||
|
### Check data in each interval
|
||||||
|
|
||||||
|
def check_data(self):
|
||||||
|
total_rows = sum(sum((y[3] - y[2]) for y in x)
|
||||||
|
for x in list(self.stream_interval.values()))
|
||||||
|
log("checking %s rows of data\n", "{:,d}".format(total_rows))
|
||||||
|
done = 0
|
||||||
|
with Progress(total_rows) as pbar:
|
||||||
|
for sid in self.stream_interval:
|
||||||
|
try:
|
||||||
|
bulk = self.bulkpath + self.stream_path[sid]
|
||||||
|
tab = nilmdb.server.bulkdata.Table(bulk)
|
||||||
|
def update(x):
|
||||||
|
pbar.update(done + x)
|
||||||
|
ints = self.stream_interval[sid]
|
||||||
|
done += self.check_table_data(sid, ints, tab, update)
|
||||||
|
finally:
|
||||||
|
tab.close()
|
||||||
|
|
||||||
|
def check_table_data(self, sid, ints, tab, update):
|
||||||
|
# Pull out all of the interval's data and verify that it's
|
||||||
|
# monotonic.
|
||||||
|
maxrows = 100000
|
||||||
|
path = self.stream_path[sid]
|
||||||
|
layout = self.stream_layout[sid]
|
||||||
|
dtype = nilmdb.client.numpyclient.layout_to_dtype(layout)
|
||||||
|
tab.file_open.cache_remove_all()
|
||||||
|
done = 0
|
||||||
|
for intv in ints:
|
||||||
|
last_ts = None
|
||||||
|
(stime, etime, spos, epos) = intv
|
||||||
|
|
||||||
|
# Break interval into maxrows-sized chunks
|
||||||
|
next_start = spos
|
||||||
|
while next_start < epos:
|
||||||
|
start = next_start
|
||||||
|
stop = min(start + maxrows, epos)
|
||||||
|
count = stop - start
|
||||||
|
next_start = stop
|
||||||
|
|
||||||
|
# Get raw data, convert to NumPy arary
|
||||||
|
try:
|
||||||
|
raw = tab.get_data(start, stop, binary = True)
|
||||||
|
data = numpy.fromstring(raw, dtype)
|
||||||
|
except Exception as e:
|
||||||
|
raise FsckError("%s: failed to grab rows %d through %d: %s",
|
||||||
|
path, start, stop, repr(e))
|
||||||
|
|
||||||
|
# Verify that timestamps are monotonic
|
||||||
|
if (numpy.diff(data['timestamp']) <= 0).any():
|
||||||
|
raise FsckError("%s: non-monotonic timestamp(s) in rows "
|
||||||
|
"%d through %d", path, start, stop)
|
||||||
|
first_ts = data['timestamp'][0]
|
||||||
|
if last_ts is not None and first_ts <= last_ts:
|
||||||
|
raise FsckError("%s: first interval timestamp %d is not "
|
||||||
|
"greater than the previous last interval "
|
||||||
|
"timestamp %d, at row %d",
|
||||||
|
path, first_ts, last_ts, start)
|
||||||
|
last_ts = data['timestamp'][-1]
|
||||||
|
|
||||||
|
# These are probably fixable, by removing the offending
|
||||||
|
# intervals. But I'm not going to bother implementing
|
||||||
|
# that yet.
|
||||||
|
|
||||||
|
# Done
|
||||||
|
done += count
|
||||||
|
update(done)
|
||||||
|
return done
|
||||||
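The @retry_if_raised(RetryFsck) decorator and the "raise RetryFsck" statements above implement a fix-and-retry loop: each fix_* helper repairs exactly one problem on disk, then restarts the enclosing check so it runs again against the repaired state. The decorator itself is defined elsewhere in nilmdb.fsck; the following is a minimal sketch of the idea, not the module's actual code:

import functools

class RetryFsck(Exception):
    """Raised by fix_* helpers after repairing one problem."""

def retry_if_raised(exc):
    """Re-run the decorated function from scratch whenever it raises exc."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            while True:
                try:
                    return func(*args, **kwargs)
                except exc:
                    continue  # something was fixed; check again from the top
        return wrapper
    return decorator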
nilmdb/scripts/nilmdb_fsck.py (new executable file, 27 lines)
@@ -0,0 +1,27 @@
#!/usr/bin/python

import nilmdb.fsck
import argparse
import os
import sys

def main():
    """Main entry point for the 'nilmdb-fsck' command line script"""

    parser = argparse.ArgumentParser(
        description = 'Check database consistency',
        formatter_class = argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-v", "--version", action="version",
                        version = nilmdb.__version__)
    parser.add_argument("-f", "--fix", action="store_true",
                        default=False, help = 'Fix errors when possible '
                        '(which may involve removing data)')
    parser.add_argument("-n", "--no-data", action="store_true",
                        default=False, help = 'Skip the slow full-data check')
    parser.add_argument('database', help = 'Database directory')
    args = parser.parse_args()

    nilmdb.fsck.Fsck(args.database, args.fix).check(skip_data = args.no_data)

if __name__ == "__main__":
    main()
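The same check can also be driven from Python rather than from the shell. A short sketch mirroring the script above (the second positional argument of Fsck is the fix flag, exactly as main() passes it; the database path is a placeholder):

import nilmdb.fsck

# Equivalent to "nilmdb-fsck --fix --no-data /path/to/db":
# repair fixable problems, but skip the slow full-data scan.
nilmdb.fsck.Fsck("/path/to/db", True).check(skip_data=True)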
nilmdb/scripts/nilmdb_server.py
@@ -1,38 +1,43 @@
 #!/usr/bin/python
 
-import nilmdb.server
-import argparse
 import os
 import sys
 import socket
+import argparse
+
+import cherrypy
+
+import nilmdb.server
+
 
 def main():
     """Main entry point for the 'nilmdb-server' command line script"""
 
     parser = argparse.ArgumentParser(
-        description = 'Run the NilmDB server',
-        formatter_class = argparse.ArgumentDefaultsHelpFormatter)
+        description='Run the NilmDB server',
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
 
-    parser.add_argument("-V", "--version", action="version",
-                        version = nilmdb.__version__)
+    parser.add_argument("-v", "--version", action="version",
+                        version=nilmdb.__version__)
 
     group = parser.add_argument_group("Standard options")
     group.add_argument('-a', '--address',
-                       help = 'Only listen on the given address',
-                       default = '0.0.0.0')
-    group.add_argument('-p', '--port', help = 'Listen on the given port',
-                       type = int, default = 12380)
-    group.add_argument('-d', '--database', help = 'Database directory',
-                       default = os.path.join(os.getcwd(), "db"))
-    group.add_argument('-q', '--quiet', help = 'Silence output',
-                       action = 'store_true')
+                       help='Only listen on the given address',
+                       default='0.0.0.0')
+    group.add_argument('-p', '--port', help='Listen on the given port',
+                       type=int, default=12380)
+    group.add_argument('-d', '--database', help='Database directory',
+                       default="./db")
+    group.add_argument('-q', '--quiet', help='Silence output',
+                       action='store_true')
     group.add_argument('-t', '--traceback',
-                       help = 'Provide tracebacks in client errors',
-                       action = 'store_true', default = False)
+                       help='Provide tracebacks in client errors',
+                       action='store_true', default=False)
 
     group = parser.add_argument_group("Debug options")
-    group.add_argument('-y', '--yappi', help = 'Run under yappi profiler and '
+    group.add_argument('-y', '--yappi', help='Run under yappi profiler and '
                        'invoke interactive shell afterwards',
-                       action = 'store_true')
+                       action='store_true')
 
     args = parser.parse_args()
 
@@ -41,47 +46,51 @@ def main():
     db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(args.database)
 
     # Configure the server
-    if args.quiet:
-        embedded = True
-    else:
-        embedded = False
+    if not args.quiet:
+        cherrypy._cpconfig.environments['embedded']['log.screen'] = True
+
     server = nilmdb.server.Server(db,
-                                  host = args.address,
-                                  port = args.port,
-                                  embedded = embedded,
-                                  force_traceback = args.traceback)
+                                  host=args.address,
+                                  port=args.port,
+                                  force_traceback=args.traceback)
 
     # Print info
     if not args.quiet:
-        print "Version: %s" % nilmdb.__version__
-        print "Database: %s" % (os.path.realpath(args.database))
+        print("Version: %s" % nilmdb.__version__)
+        print("Database: %s" % (os.path.realpath(args.database)))
         if args.address == '0.0.0.0' or args.address == '::':
             host = socket.getfqdn()
         else:
            host = args.address
-        print "Server URL: http://%s:%d/" % ( host, args.port)
-        print "----"
+        print("Server URL: http://%s:%d/" % (host, args.port))
+        print("----")
 
     # Run it
-    if args.yappi:
-        print "Running in yappi"
-        try:
-            import yappi
-            yappi.start()
-            server.start(blocking = True)
-        finally:
-            yappi.stop()
-            yappi.print_stats(sort_type = yappi.SORTTYPE_TTOT, limit = 50)
-            from IPython import embed
-            embed(header = "Use the yappi object to explore further, "
-                  "quit to exit")
-    else:
-        server.start(blocking = True)
-
-    # Clean up
-    if not args.quiet:
-        print "Closing database"
-    db.close()
+    try:
+        if args.yappi:
+            print("Running in yappi")
+            try:
+                import yappi
+                yappi.start()
+                server.start(blocking=True)
+            finally:
+                yappi.stop()
+                stats = yappi.get_func_stats()
+                stats.sort("ttot")
+                stats.print_all()
+                from IPython import embed
+                embed(header="Use the `yappi` or `stats` object to explore "
                      "further, quit to exit")
+        else:
+            server.start(blocking=True)
+    except nilmdb.server.serverutil.CherryPyExit:
+        print("Exiting due to CherryPy error", file=sys.stderr)
+        raise
+    finally:
+        if not args.quiet:
+            print("Closing database")
+        db.close()
 
 
 if __name__ == "__main__":
     main()
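The profiling hookup changed because newer yappi releases dropped yappi.print_stats(sort_type=...) in favor of a stats-object API, which is what the new code uses. A standalone sketch of that newer interface (assuming yappi is installed; the workload function here is only a stand-in for server.start()):

import yappi

def workload():
    # placeholder for the real work being profiled
    sum(i * i for i in range(200000))

yappi.start()
try:
    workload()
finally:
    yappi.stop()
    stats = yappi.get_func_stats()
    stats.sort("ttot")   # sort by total time, matching the script above
    stats.print_all()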
nilmdb/scripts/nilmtool.py
@@ -2,9 +2,11 @@
 import nilmdb.cmdline
 
+
 def main():
     """Main entry point for the 'nilmtool' command line script"""
     nilmdb.cmdline.Cmdline().run()
 
+
 if __name__ == "__main__":
     main()
nilmdb/server/__init__.py
@@ -1,21 +1,9 @@
 """nilmdb.server"""
 
-from __future__ import absolute_import
-
-# Try to set up pyximport to automatically rebuild Cython modules.  If
-# this doesn't work, it's OK, as long as the modules were built externally.
-# (e.g. python setup.py build_ext --inplace)
-try: # pragma: no cover
-    import Cython
-    import distutils.version
-    if (distutils.version.LooseVersion(Cython.__version__) <
-        distutils.version.LooseVersion("0.17")): # pragma: no cover
-        raise ImportError("Cython version too old")
-    import pyximport
-    pyximport.install(inplace = True, build_in_temp = False)
-except (ImportError, TypeError): # pragma: no cover
-    pass
+# Set up pyximport to automatically rebuild Cython modules if needed.
+import pyximport
+pyximport.install(inplace=True, build_in_temp=False)
 
 from nilmdb.server.nilmdb import NilmDB
-from nilmdb.server.server import Server
+from nilmdb.server.server import Server, wsgi_application
 from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
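With the version check gone, Cython effectively becomes a hard dependency of nilmdb.server: pyximport.install() hooks Python's import machinery so that a .pyx module is compiled on first import and recompiled whenever its source changes. A tiny sketch of the pattern (the hello.pyx module name is hypothetical, used only for illustration):

import pyximport
pyximport.install(inplace=True, build_in_temp=False)

import hello  # hello.pyx is compiled here on first import, rebuilt on change
print(hello.__file__)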
nilmdb/server/bulkdata.py
@@ -1,60 +1,138 @@
 # Fixed record size bulk data storage
 
-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the parent nilmdb module instead.
-from __future__ import absolute_import
-from __future__ import division
-from nilmdb.utils.printf import *
-from nilmdb.utils.time import float_time_to_string as ftts
-import nilmdb.utils
-
 import os
-import cPickle as pickle
 import re
 import sys
+import pickle
+import tempfile
 
-#from . import pyrocket as rocket
+from nilmdb.utils.printf import sprintf
+from nilmdb.utils.time import timestamp_to_string
+import nilmdb.utils
+
+import nilmdb.utils.lock
 from . import rocket
 
 # Up to 256 open file descriptors at any given time.
 # These variables are global so they can be used in the decorator arguments.
-table_cache_size = 16
-fd_cache_size = 16
+table_cache_size = 32
+fd_cache_size = 8
 
-@nilmdb.utils.must_close(wrap_verify = False)
-class BulkData(object):
+
+@nilmdb.utils.must_close(wrap_verify=False)
+class BulkData():
     def __init__(self, basepath, **kwargs):
-        self.basepath = basepath
-        self.root = os.path.join(self.basepath, "data")
+        if isinstance(basepath, str):
+            self.basepath = self._encode_filename(basepath)
+        else:
+            self.basepath = basepath
+        self.root = os.path.join(self.basepath, b"data")
+        self.lock = self.root + b".lock"
+        self.lockfile = None
 
         # Tuneables
-        if "file_size" in kwargs:
+        if "file_size" in kwargs and kwargs["file_size"] is not None:
             self.file_size = kwargs["file_size"]
         else:
             # Default to approximately 128 MiB per file
             self.file_size = 128 * 1024 * 1024
 
-        if "files_per_dir" in kwargs:
+        if "files_per_dir" in kwargs and kwargs["files_per_dir"] is not None:
             self.files_per_dir = kwargs["files_per_dir"]
         else:
             # 32768 files per dir should work even on FAT32
             self.files_per_dir = 32768
 
+        if "initial_nrows" in kwargs and kwargs["initial_nrows"] is not None:
+            self.initial_nrows = kwargs["initial_nrows"]
+        else:
+            # First row is 0
+            self.initial_nrows = 0
+
         # Make root path
         if not os.path.isdir(self.root):
             os.mkdir(self.root)
 
+        # Create the lock
+        self.lockfile = open(self.lock, "w")
+        if not nilmdb.utils.lock.exclusive_lock(self.lockfile):
+            raise IOError('database at "' +
+                          self._decode_filename(self.basepath) +
+                          '" is already locked by another process')
+
     def close(self):
         self.getnode.cache_remove_all()
+        if self.lockfile:
+            nilmdb.utils.lock.exclusive_unlock(self.lockfile)
+            self.lockfile.close()
+            try:
+                os.unlink(self.lock)
+            except OSError:
+                pass
+            self.lockfile = None
 
     def _encode_filename(self, path):
-        # Encode all paths to UTF-8, regardless of sys.getfilesystemencoding(),
-        # because we want to be able to represent all code points and the user
-        # will never be directly exposed to filenames.  We can then do path
-        # manipulations on the UTF-8 directly.
-        if isinstance(path, unicode):
-            return path.encode('utf-8')
-        return path
+        # Translate unicode strings to raw bytes, if needed.  We
+        # always manipulate paths internally as bytes.
+        return path.encode('utf-8')
+
+    def _decode_filename(self, path):
+        # Translate raw bytes to unicode strings, escaping if needed
+        return path.decode('utf-8', errors='backslashreplace')
+
+    def _create_check_ospath(self, ospath):
+        if ospath[-1:] == b'/':
+            raise ValueError("invalid path; should not end with a /")
+        if Table.exists(ospath):
+            raise ValueError("stream already exists at this path")
+        if os.path.isdir(ospath):
+            # Look for any files in subdirectories.  Fully empty subdirectories
+            # are OK; they might be there during a rename
+            for (root, dirs, files) in os.walk(ospath):
+                if files:
+                    raise ValueError(
+                        "non-empty subdirs of this path already exist")
+
+    def _create_parents(self, unicodepath):
+        """Verify the path name, and create parent directories if they
+        don't exist.  Returns a list of elements that got created."""
+        path = self._encode_filename(unicodepath)
+
+        if path[0:1] != b'/':
+            raise ValueError("paths must start with / ")
+        [group, node] = path.rsplit(b"/", 1)
+        if group == b'':
+            raise ValueError("invalid path; path must contain at least one "
+                             "folder")
+        if node == b'':
+            raise ValueError("invalid path; should not end with a /")
+        if not Table.valid_path(path):
+            raise ValueError("path name is invalid or contains reserved words")
+
+        # Create the table's base dir.  Note that we make a
+        # distinction here between NilmDB paths (always Unix style,
+        # split apart manually) and OS paths (built up with
+        # os.path.join)
+
+        # Make directories leading up to this one
+        elements = path.lstrip(b'/').split(b'/')
+        made_dirs = []
+        try:
+            # Make parent elements
+            for i in range(len(elements)):
+                ospath = os.path.join(self.root, *elements[0:i])
+                if Table.exists(ospath):
+                    raise ValueError("path is subdir of existing node")
+                if not os.path.isdir(ospath):
+                    os.mkdir(ospath)
+                    made_dirs.append(ospath)
+        except Exception:
+            # Remove paths that we created
+            for ospath in reversed(made_dirs):
+                os.rmdir(ospath)
+            raise
+
+        return elements
 
     def create(self, unicodepath, layout_name):
         """
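nilmdb.utils.lock itself is not part of this diff; the calls above only require that exclusive_lock() return a truthy value on success. On POSIX systems such a pair can be built on flock, roughly as follows (a sketch under that assumption, not the actual module):

import fcntl

def exclusive_lock(f):
    """Try to take a non-blocking exclusive lock on an open file object."""
    try:
        fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        return False   # some other process holds the lock
    return True

def exclusive_unlock(f):
    fcntl.flock(f.fileno(), fcntl.LOCK_UN)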
@@ -67,32 +145,11 @@ class BulkData(object):
 
         layout_name: string for nilmdb.layout.get_named(), e.g. 'float32_8'
         """
-        path = self._encode_filename(unicodepath)
-
-        if path[0] != '/':
-            raise ValueError("paths must start with /")
-        [ group, node ] = path.rsplit("/", 1)
-        if group == '':
-            raise ValueError("invalid path; path must contain at least one "
-                             "folder")
-
-        # Create the table.  Note that we make a distinction here
-        # between NilmDB paths (always Unix style, split apart
-        # manually) and OS paths (built up with os.path.join)
-
-        # Make directories leading up to this one
-        elements = path.lstrip('/').split('/')
-        for i in range(len(elements)):
-            ospath = os.path.join(self.root, *elements[0:i])
-            if Table.exists(ospath):
-                raise ValueError("path is subdir of existing node")
-            if not os.path.isdir(ospath):
-                os.mkdir(ospath)
+        elements = self._create_parents(unicodepath)
 
         # Make the final dir
         ospath = os.path.join(self.root, *elements)
-        if os.path.isdir(ospath):
-            raise ValueError("subdirs of this path already exist")
+        self._create_check_ospath(ospath)
         os.mkdir(ospath)
 
         try:
@@ -102,24 +159,78 @@ class BulkData(object):
 
             # Open and cache it
             self.getnode(unicodepath)
-        except:
+        except Exception:
             exc_info = sys.exc_info()
             try:
                 os.rmdir(ospath)
             except OSError:
                 pass
-            raise exc_info[1], None, exc_info[2]
+            raise exc_info[1].with_traceback(exc_info[2])
 
         # Success
         return
 
+    def _remove_leaves(self, unicodepath):
+        """Remove empty directories starting at the leaves of unicodepath"""
+        path = self._encode_filename(unicodepath)
+        elements = path.lstrip(b'/').split(b'/')
+        for i in reversed(list(range(len(elements)))):
+            ospath = os.path.join(self.root, *elements[0:i+1])
+            try:
+                os.rmdir(ospath)
+            except OSError:
+                pass
+
+    def rename(self, oldunicodepath, newunicodepath):
+        """Move entire tree from 'oldunicodepath' to
+        'newunicodepath'"""
+        oldpath = self._encode_filename(oldunicodepath)
+        newpath = self._encode_filename(newunicodepath)
+
+        # Get OS paths
+        oldelements = oldpath.lstrip(b'/').split(b'/')
+        oldospath = os.path.join(self.root, *oldelements)
+        newelements = newpath.lstrip(b'/').split(b'/')
+        newospath = os.path.join(self.root, *newelements)
+
+        # Basic checks
+        if oldospath == newospath:
+            raise ValueError("old and new paths are the same")
+
+        # Remove Table object at old path from cache
+        self.getnode.cache_remove(self, oldunicodepath)
+
+        # Move the table to a temporary location
+        tmpdir = tempfile.mkdtemp(prefix=b"rename-", dir=self.root)
+        tmppath = os.path.join(tmpdir, b"table")
+        os.rename(oldospath, tmppath)
+
+        try:
+            # Check destination path
+            self._create_check_ospath(newospath)
+
+            # Create parent dirs for new location
+            self._create_parents(newunicodepath)
+
+            # Move table into new location
+            os.rename(tmppath, newospath)
+        except Exception:
+            # On failure, move the table back to original path
+            os.rename(tmppath, oldospath)
+            os.rmdir(tmpdir)
+            raise
+
+        # Prune old dirs
+        self._remove_leaves(oldunicodepath)
+        os.rmdir(tmpdir)
+
     def destroy(self, unicodepath):
         """Fully remove all data at a particular path.  No way to undo
         it!  The group/path structure is removed, too."""
         path = self._encode_filename(unicodepath)
 
         # Get OS path
-        elements = path.lstrip('/').split('/')
+        elements = path.lstrip(b'/').split(b'/')
         ospath = os.path.join(self.root, *elements)
 
         # Remove Table object from cache
@@ -128,41 +239,42 @@ class BulkData(object):
         # Remove the contents of the target directory
         if not Table.exists(ospath):
             raise ValueError("nothing at that path")
-        for (root, dirs, files) in os.walk(ospath, topdown = False):
+        for (root, dirs, files) in os.walk(ospath, topdown=False):
             for name in files:
                 os.remove(os.path.join(root, name))
             for name in dirs:
                 os.rmdir(os.path.join(root, name))
 
-        # Remove empty parent directories
-        for i in reversed(range(len(elements))):
-            ospath = os.path.join(self.root, *elements[0:i+1])
-            try:
-                os.rmdir(ospath)
-            except OSError:
-                break
+        # Remove leftover empty directories
+        self._remove_leaves(unicodepath)
 
     # Cache open tables
-    @nilmdb.utils.lru_cache(size = table_cache_size,
-                            onremove = lambda x: x.close())
+    @nilmdb.utils.lru_cache(size=table_cache_size,
+                            onremove=lambda x: x.close())
     def getnode(self, unicodepath):
         """Return a Table object corresponding to the given database
         path, which must exist."""
         path = self._encode_filename(unicodepath)
-        elements = path.lstrip('/').split('/')
+        elements = path.lstrip(b'/').split(b'/')
        ospath = os.path.join(self.root, *elements)
-        return Table(ospath)
+        return Table(ospath, self.initial_nrows)
 
-@nilmdb.utils.must_close(wrap_verify = False)
-class Table(object):
+
+@nilmdb.utils.must_close(wrap_verify=False)
+class Table():
     """Tools to help access a single table (data at a specific OS path)."""
     # See design.md for design details
 
     # Class methods, to help keep format details in this class.
+    @classmethod
+    def valid_path(cls, root):
+        """Return True if a root path is a valid name"""
+        return b"_format" not in root.split(b"/")
+
     @classmethod
     def exists(cls, root):
         """Return True if a table appears to exist at this OS path"""
-        return os.path.isfile(os.path.join(root, "_format"))
+        return os.path.isfile(os.path.join(root, b"_format"))
 
     @classmethod
     def create(cls, root, layout, file_size, files_per_dir):
@@ -175,37 +287,30 @@ class Table(object):
         rows_per_file = max(file_size // rkt.binary_size, 1)
         rkt.close()
 
-        fmt = { "rows_per_file": rows_per_file,
-                "files_per_dir": files_per_dir,
-                "layout": layout,
-                "version": 2 }
-        with open(os.path.join(root, "_format"), "wb") as f:
+        fmt = {
+            "rows_per_file": rows_per_file,
+            "files_per_dir": files_per_dir,
+            "layout": layout,
+            "version": 3
+        }
+        with open(os.path.join(root, b"_format"), "wb") as f:
             pickle.dump(fmt, f, 2)
 
     # Normal methods
-    def __init__(self, root):
+    def __init__(self, root, initial_nrows=0):
         """'root' is the full OS path to the directory of this table"""
         self.root = root
+        self.initial_nrows = initial_nrows
 
         # Load the format
-        with open(os.path.join(self.root, "_format"), "rb") as f:
+        with open(os.path.join(self.root, b"_format"), "rb") as f:
             fmt = pickle.load(f)
 
-        if fmt["version"] == 1: # pragma: no cover
-            # We can handle this old version by converting from
-            # struct_fmt back to layout name.
-            compat = { "<dHHHHHH": "uint16_6",
-                       "<dHHHHHHHHH": "uint16_9",
-                       "<dffffffff": "float32_8" }
-            if fmt["struct_fmt"] in compat:
-                fmt["version"] = 2
-                fmt["layout"] = compat[fmt["struct_fmt"]]
-            else:
-                raise NotImplementedError("old version 1 data with format "
-                                          + fmt["struct_fmt"] + " is no good")
-        elif fmt["version"] != 2: # pragma: no cover (just future proofing)
-            raise NotImplementedError("version " + str(fmt["version"]) +
-                                      " bulk data store not supported")
+        if fmt["version"] != 3:
+            # Old versions used floating point timestamps, which aren't
+            # valid anymore.
+            raise NotImplementedError("old version " + str(fmt["version"]) +
+                                      " bulk data store is not supported")
 
         self.rows_per_file = fmt["rows_per_file"]
         self.files_per_dir = fmt["files_per_dir"]
@@ -231,31 +336,38 @@ class Table(object):
         # greater than the row number of any piece of data that
         # currently exists, not necessarily all data that _ever_
         # existed.
-        regex = re.compile("^[0-9a-f]{4,}$")
+        regex = re.compile(b"^[0-9a-f]{4,}$")
 
         # Find the last directory.  We sort and loop through all of them,
         # starting with the numerically greatest, because the dirs could be
-        # empty if something was deleted.
+        # empty if something was deleted but the directory was unexpectedly
+        # not deleted.
         subdirs = sorted(filter(regex.search, os.listdir(self.root)),
-                         key = lambda x: int(x, 16), reverse = True)
+                         key=lambda x: int(x, 16), reverse=True)
 
         for subdir in subdirs:
             # Now find the last file in that dir
             path = os.path.join(self.root, subdir)
-            files = filter(regex.search, os.listdir(path))
-            if not files: # pragma: no cover (shouldn't occur)
+            files = list(filter(regex.search, os.listdir(path)))
+            if not files:
                 # Empty dir: try the next one
                 continue
 
             # Find the numerical max
-            filename = max(files, key = lambda x: int(x, 16))
+            filename = max(files, key=lambda x: int(x, 16))
             offset = os.path.getsize(os.path.join(self.root, subdir, filename))
 
             # Convert to row number
             return self._row_from_offset(subdir, filename, offset)
 
-        # No files, so no data
-        return 0
+        # No files, so no data.  We typically start at row 0 in this
+        # case, although initial_nrows is specified during some tests
+        # to exercise other parts of the code better.  Since we have
+        # no files yet, round initial_nrows up so it points to a row
+        # that would begin a new file.
+        nrows = ((self.initial_nrows + (self.rows_per_file - 1)) //
+                 self.rows_per_file) * self.rows_per_file
+        return nrows
 
     def _offset_from_row(self, row):
         """Return a (subdir, filename, offset, count) tuple:
@@ -268,8 +380,8 @@ class Table(object):
         filenum = row // self.rows_per_file
         # It's OK if these format specifiers are too short; the filenames
         # will just get longer but will still sort correctly.
-        dirname = sprintf("%04x", filenum // self.files_per_dir)
-        filename = sprintf("%04x", filenum % self.files_per_dir)
+        dirname = sprintf(b"%04x", filenum // self.files_per_dir)
+        filename = sprintf(b"%04x", filenum % self.files_per_dir)
         offset = (row % self.rows_per_file) * self.row_size
         count = self.rows_per_file - (row % self.rows_per_file)
         return (dirname, filename, offset, count)
@@ -277,14 +389,14 @@ class Table(object):
     def _row_from_offset(self, subdir, filename, offset):
         """Return the row number that corresponds to the given
         'subdir/filename' and byte-offset within that file."""
-        if (offset % self.row_size) != 0: # pragma: no cover
+        if (offset % self.row_size) != 0:
            # this shouldn't occur, unless there is some corruption somewhere
            raise ValueError("file offset is not a multiple of data size")
         filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16)
         row = (filenum * self.rows_per_file) + (offset // self.row_size)
         return row
 
-    def _remove_or_truncate_file(self, subdir, filename, offset = 0):
+    def _remove_or_truncate_file(self, subdir, filename, offset=0):
         """Remove the given file, and remove the subdirectory too
         if it's empty.  If offset is nonzero, truncate the file
         to that size instead."""
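_offset_from_row and _row_from_offset are inverses: a flat row number maps to a hex subdirectory, hex filename, and byte offset within that file, and back. A worked example with made-up table parameters (illustration only, not any real stream's values):

rows_per_file = 10000      # hypothetical tuning values, for illustration
files_per_dir = 32768
row_size = 36              # bytes per row; depends on the table's layout

row = 123456789
filenum = row // rows_per_file                   # 12345
dirname = "%04x" % (filenum // files_per_dir)    # '0000'
filename = "%04x" % (filenum % files_per_dir)    # '3039'
offset = (row % rows_per_file) * row_size        # 6789 * 36 == 244404

# ...and back again:
filenum2 = int(dirname, 16) * files_per_dir + int(filename, 16)
assert filenum2 * rows_per_file + offset // row_size == row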
@@ -300,12 +412,12 @@ class Table(object):
         # Try deleting subdir, too
         try:
             os.rmdir(os.path.join(self.root, subdir))
-        except:
+        except Exception:
             pass
 
     # Cache open files
-    @nilmdb.utils.lru_cache(size = fd_cache_size,
-                            onremove = lambda f: f.close())
+    @nilmdb.utils.lru_cache(size=fd_cache_size,
+                            onremove=lambda f: f.close())
     def file_open(self, subdir, filename):
         """Open and map a given 'subdir/filename' (relative to self.root).
         Will be automatically closed when evicted from the cache."""
@@ -318,38 +430,23 @@ class Table(object):
         return rocket.Rocket(self.layout,
                              os.path.join(self.root, subdir, filename))
 
-    def append(self, data):
-        """Append the data and flush it to disk.
-        data is a nested Python list [[row],[row],[...]]"""
-        remaining = len(data)
-        dataiter = iter(data)
-        while remaining:
-            # See how many rows we can fit into the current file, and open it
-            (subdir, fname, offset, count) = self._offset_from_row(self.nrows)
-            if count > remaining:
-                count = remaining
-
-            f = self.file_open(subdir, fname)
-
-            # Write the data
-            written = f.append_iter(count, dataiter)
-            if written != count: # pragma: no cover
-                raise Exception("Didn't write the expected number of rows: "
-                                + str(written) + " != " + str(count))
-            remaining -= count
-            self.nrows += count
-
-    def append_string(self, data, start, end):
+    def append_data(self, data, start, end, binary=False):
         """Parse the formatted string in 'data', according to the
         current layout, and append it to the table.  If any timestamps
         are non-monotonic, or don't fall between 'start' and 'end',
         a ValueError is raised.
 
+        Note that data is always of 'bytes' type.
+
+        If 'binary' is True, the data should be in raw binary format
+        instead: little-endian, matching the current table's layout,
+        including the int64 timestamp.
+
         If this function succeeds, it returns normally.  Otherwise,
         the table is reverted back to its original state by truncating
         or deleting files as necessary."""
         data_offset = 0
-        last_timestamp = -1e12
+        last_timestamp = nilmdb.utils.time.min_timestamp
         tot_rows = self.nrows
         count = 0
         linenum = 0
@@ -357,40 +454,57 @@ class Table(object):
             while data_offset < len(data):
                 # See how many rows we can fit into the current file,
                 # and open it
-                (subdir, fname, offset, count) = self._offset_from_row(tot_rows)
+                (subdir, fname, offs, count) = self._offset_from_row(tot_rows)
                 f = self.file_open(subdir, fname)
 
                 # Ask the rocket object to parse and append up to "count"
                 # rows of data, verifying things along the way.
                 try:
+                    if binary:
+                        appender = f.append_binary
+                    else:
+                        appender = f.append_string
                     (added_rows, data_offset, last_timestamp, linenum
-                     ) = f.append_string(count, data, data_offset, linenum,
-                                         start, end, last_timestamp)
+                     ) = appender(count, data, data_offset, linenum,
+                                  start, end, last_timestamp)
                 except rocket.ParseError as e:
-                    (linenum, errtype, obj) = e.args
+                    (linenum, colnum, errtype, obj) = e.args
+                    if binary:
+                        where = "byte %d: " % (linenum)
+                    else:
+                        where = "line %d, column %d: " % (linenum, colnum)
+                    # Extract out the error line, add column marker
+                    try:
+                        if binary:
+                            raise IndexError
+                        bad = data.splitlines()[linenum-1]
+                        bad += b'\n' + b' ' * (colnum - 1) + b'^'
+                    except IndexError:
+                        bad = b""
                     if errtype == rocket.ERR_NON_MONOTONIC:
-                        err = sprintf("line %d: timestamp is not monotonically "
-                                      "increasing", linenum)
+                        err = "timestamp is not monotonically increasing"
                     elif errtype == rocket.ERR_OUT_OF_INTERVAL:
                         if obj < start:
-                            err = sprintf("line %d: Data timestamp %s < "
-                                          "start time %s", linenum,
-                                          ftts(obj), ftts(start))
+                            err = sprintf("Data timestamp %s < start time %s",
+                                          timestamp_to_string(obj),
+                                          timestamp_to_string(start))
                         else:
-                            err = sprintf("line %d: Data timestamp %s >= "
-                                          "end time %s", linenum,
-                                          ftts(obj), ftts(end))
+                            err = sprintf("Data timestamp %s >= end time %s",
+                                          timestamp_to_string(obj),
+                                          timestamp_to_string(end))
                     else:
-                        err = sprintf("line %d: %s", linenum, str(obj))
-                    raise ValueError("error parsing input data: " + err)
+                        err = str(obj)
+                    bad_str = bad.decode('utf-8', errors='backslashreplace')
+                    raise ValueError("error parsing input data: " +
+                                     where + err + "\n" + bad_str)
                 tot_rows += added_rows
         except Exception:
             # Some failure, so try to roll things back by truncating or
             # deleting files that we may have appended data to.
             cleanpos = self.nrows
             while cleanpos <= tot_rows:
-                (subdir, fname, offset, count) = self._offset_from_row(cleanpos)
-                self._remove_or_truncate_file(subdir, fname, offset)
+                (subdir, fname, offs, count) = self._offset_from_row(cleanpos)
+                self._remove_or_truncate_file(subdir, fname, offs)
                 cleanpos += count
             # Re-raise original exception
             raise
@@ -398,15 +512,11 @@ class Table(object):
         # Success, so update self.nrows accordingly
         self.nrows = tot_rows
 
-    def _get_data(self, start, stop, as_string):
+    def get_data(self, start, stop, binary=False):
         """Extract data corresponding to Python range [n:m],
-        and returns a numeric list or formatted string,
-        depending on as_string."""
-        if (start is None or
-            stop is None or
-            start > stop or
-            start < 0 or
-            stop > self.nrows):
+        and returns a formatted string"""
+        if (start is None or stop is None or
+                start > stop or start < 0 or stop > self.nrows):
             raise IndexError("Index out of range")
 
         ret = []
@@ -417,42 +527,21 @@ class Table(object):
             if count > remaining:
                 count = remaining
             f = self.file_open(subdir, filename)
-            if as_string:
-                ret.append(f.extract_string(offset, count))
+            if binary:
+                ret.append(f.extract_binary(offset, count))
             else:
-                ret.extend(f.extract_list(offset, count))
+                ret.append(f.extract_string(offset, count))
             remaining -= count
             row += count
-        if as_string:
-            return "".join(ret)
-        return ret
-
-    def get_as_text(self, start, stop):
-        """Extract data corresponding to Python range [n:m],
-        and returns a formatted string"""
-        return self._get_data(start, stop, True)
-
-    def __getitem__(self, key):
-        """Extract data and return it.  Supports simple indexing
-        (table[n]) and range slices (table[n:m]).  Returns a nested
-        Python list [[row],[row],[...]]"""
-
-        # Handle simple slices
-        if isinstance(key, slice):
-            # Fall back to brute force if the slice isn't simple
-            try:
-                if (key.step is not None and key.step != 1):
-                    raise IndexError
-                return self._get_data(key.start, key.stop, False)
-            except IndexError:
-                return [ self[x] for x in xrange(*key.indices(self.nrows)) ]
-
-        # Handle single points (inefficiently!)
-        if key < 0 or key >= self.nrows:
+        return b"".join(ret)
+
+    def __getitem__(self, row):
+        """Extract timestamps from a row, with table[n] notation."""
+        if row < 0 or row >= self.nrows:
             raise IndexError("Index out of range")
-        (subdir, filename, offset, count) = self._offset_from_row(key)
+        (subdir, filename, offset, count) = self._offset_from_row(row)
         f = self.file_open(subdir, filename)
-        return f.extract_list(offset, 1)[0]
+        return f.extract_timestamp(offset)
 
     def _remove_rows(self, subdir, filename, start, stop):
         """Helper to mark specific rows as being removed from a
@@ -467,12 +556,12 @@ class Table(object):
         # file.  Only when the list covers the entire extent of the
         # file will that file be removed.
         datafile = os.path.join(self.root, subdir, filename)
-        cachefile = datafile + ".removed"
+        cachefile = datafile + b".removed"
         try:
             with open(cachefile, "rb") as f:
                 ranges = pickle.load(f)
             cachefile_present = True
-        except:
+        except Exception:
             ranges = []
             cachefile_present = False
 
@@ -494,8 +583,9 @@ class Table(object):
                 # Not connected; append previous and start again
                 merged.append(prev)
                 prev = new
-        if prev is not None:
-            merged.append(prev)
+        # Last range we were looking at goes into the file.  We know
+        # there was at least one (the one we just removed).
+        merged.append(prev)
 
         # If the range covered the whole file, we can delete it now.
         # Note that the last file in a table may be only partially
@@ -504,7 +594,7 @@ class Table(object):
         # remainder will be filled on a subsequent append(), and things
         # are generally easier if we don't have to special-case that.
         if (len(merged) == 1 and
-            merged[0][0] == 0 and merged[0][1] == self.rows_per_file):
+                merged[0][0] == 0 and merged[0][1] == self.rows_per_file):
             # Delete files
             if cachefile_present:
                 os.remove(cachefile)
@@ -543,11 +633,3 @@ class Table(object):
             self._remove_rows(subdir, filename, row_offset, row_offset + count)
             remaining -= count
             row += count
-
-class TimestampOnlyTable(object):
-    """Helper that lets us pass a Tables object into bisect, by
-    returning only the timestamp when a particular row is requested."""
-    def __init__(self, table):
-        self.table = table
-    def __getitem__(self, index):
-        return self.table[index][0]
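Removed rows are tracked per data file in a pickled "<file>.removed" list of half-open (start, stop) row ranges; _remove_rows merges each new range into that list and only deletes the file once a single range covers it completely. A simplified sketch of just the merge step, for illustration (not the method's exact code):

def merge_removed_ranges(ranges, new):
    """Insert half-open (start, stop) 'new' into 'ranges', merging
    any ranges that overlap or abut.  Returns the merged list."""
    merged = []
    prev = None
    for r in sorted(ranges + [new]):
        if prev is not None and r[0] <= prev[1]:
            prev = (prev[0], max(prev[1], r[1]))   # connected: extend
        else:
            if prev is not None:
                merged.append(prev)
            prev = r
    merged.append(prev)
    return merged

assert merge_removed_ranges([(0, 10), (20, 30)], (10, 20)) == [(0, 30)]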
nilmdb/server/errors.py
@@ -1,12 +1,15 @@
 """Exceptions"""
 
+
 class NilmDBError(Exception):
     """Base exception for NilmDB errors"""
-    def __init__(self, message = "Unspecified error"):
-        Exception.__init__(self, message)
+    def __init__(self, msg="Unspecified error"):
+        super().__init__(msg)
+
 
 class StreamError(NilmDBError):
     pass
 
+
 class OverlapError(NilmDBError):
     pass
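Because StreamError and OverlapError both subclass NilmDBError, callers can catch the whole family with a single handler; a brief usage example:

from nilmdb.server.errors import NilmDBError, OverlapError

try:
    raise OverlapError("new data overlaps an existing interval")
except NilmDBError as e:       # also catches StreamError and OverlapError
    print("database error:", e)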
|||||||
@@ -1,5 +1,11 @@
|
|||||||
|
# cython: language_level=2
|
||||||
|
|
||||||
"""Interval, IntervalSet
|
"""Interval, IntervalSet
|
||||||
|
|
||||||
|
The Interval implemented here is just like
|
||||||
|
nilmdb.utils.interval.Interval, except implemented in Cython for
|
||||||
|
speed.
|
||||||
|
|
||||||
Represents an interval of time, and a set of such intervals.
|
Represents an interval of time, and a set of such intervals.
|
||||||
|
|
||||||
Intervals are half-open, ie. they include data points with timestamps
|
Intervals are half-open, ie. they include data points with timestamps
|
||||||
@@ -19,51 +25,54 @@ Intervals are half-open, ie. they include data points with timestamps
 # Fourth version is an optimized rb-tree that stores interval starts
 # and ends directly in the tree, like bxinterval did.

-from ..utils.time import float_time_to_string as ftts
+from ..utils.time import min_timestamp as nilmdb_min_timestamp
+from ..utils.time import max_timestamp as nilmdb_max_timestamp
+from ..utils.time import timestamp_to_string
+from ..utils.iterator import imerge
+from ..utils.interval import IntervalError
+import itertools

 cimport rbtree
-cdef extern from "stdint.h":
-    ctypedef unsigned long long uint64_t
+from libc.stdint cimport uint64_t, int64_t

-class IntervalError(Exception):
-    """Error due to interval overlap, etc"""
-    pass
+ctypedef int64_t timestamp_t

 cdef class Interval:
     """Represents an interval of time."""

-    cdef public double start, end
+    cdef public timestamp_t start, end

-    def __init__(self, double start, double end):
+    def __init__(self, timestamp_t start, timestamp_t end):
         """
-        'start' and 'end' are arbitrary floats that represent time
+        'start' and 'end' are arbitrary numbers that represent time
         """
         if start >= end:
             # Explicitly disallow zero-width intervals (since they're half-open)
             raise IntervalError("start %s must precede end %s" % (start, end))
-        self.start = float(start)
-        self.end = float(end)
+        self.start = start
+        self.end = end

     def __repr__(self):
         s = repr(self.start) + ", " + repr(self.end)
         return self.__class__.__name__ + "(" + s + ")"

     def __str__(self):
-        return "[" + ftts(self.start) + " -> " + ftts(self.end) + ")"
+        return ("[" + timestamp_to_string(self.start) +
+                " -> " + timestamp_to_string(self.end) + ")")

-    def __cmp__(self, Interval other):
-        """Compare two intervals. If non-equal, order by start then end"""
-        if not isinstance(other, Interval):
-            raise TypeError("bad type")
-        if self.start == other.start:
-            if self.end < other.end:
-                return -1
-            if self.end > other.end:
-                return 1
-            return 0
-        if self.start < other.start:
-            return -1
-        return 1
+    # Compare two intervals. If non-equal, order by start then end
+    def __lt__(self, Interval other):
+        return (self.start, self.end) < (other.start, other.end)
+    def __gt__(self, Interval other):
+        return (self.start, self.end) > (other.start, other.end)
+    def __le__(self, Interval other):
+        return (self.start, self.end) <= (other.start, other.end)
+    def __ge__(self, Interval other):
+        return (self.start, self.end) >= (other.start, other.end)
+    def __eq__(self, Interval other):
+        return (self.start, self.end) == (other.start, other.end)
+    def __ne__(self, Interval other):
+        return (self.start, self.end) != (other.start, other.end)

     cpdef intersects(self, Interval other):
         """Return True if two Interval objects intersect"""

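Python 3 removes `__cmp__`, so the class now defines each rich comparison directly over `(start, end)` tuples. The same ordering can be written in plain Python; a minimal sketch (not the Cython class itself), using `functools.total_ordering` to derive the remaining operators:

```python
import functools

@functools.total_ordering
class Interval:
    """Half-open time interval [start, end), ordered by start then end."""
    def __init__(self, start, end):
        if start >= end:
            raise ValueError("start %s must precede end %s" % (start, end))
        self.start = start
        self.end = end

    def __eq__(self, other):
        return (self.start, self.end) == (other.start, other.end)

    def __lt__(self, other):
        return (self.start, self.end) < (other.start, other.end)

# Tuple comparison gives the same "order by start, then end" rule that
# the old __cmp__ implemented with nested if/else branches.
assert Interval(1, 5) < Interval(2, 3)
assert Interval(1, 5) < Interval(1, 6)
assert Interval(1, 5) == Interval(1, 5)
```

Spelling out all six methods, as the Cython version does, avoids the extra Python-level indirection that `total_ordering` adds on a hot comparison path.
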
@@ -71,7 +80,7 @@ cdef class Interval:
             return False
         return True

-    cpdef subset(self, double start, double end):
+    cpdef subset(self, timestamp_t start, timestamp_t end):
         """Return a new Interval that is a subset of this one"""
         # A subclass that tracks additional data might override this.
         if start < self.start or end > self.end:

@@ -93,14 +102,14 @@ cdef class DBInterval(Interval):
       db_end = 200, db_endpos = 20000
     """

-    cpdef public double db_start, db_end
+    cpdef public timestamp_t db_start, db_end
     cpdef public uint64_t db_startpos, db_endpos

     def __init__(self, start, end,
                  db_start, db_end,
                  db_startpos, db_endpos):
         """
-        'db_start' and 'db_end' are arbitrary floats that represent
+        'db_start' and 'db_end' are arbitrary numbers that represent
         time.  They must be a strict superset of the time interval
         covered by 'start' and 'end'.  The 'db_startpos' and
         'db_endpos' are arbitrary database position indicators that

@@ -120,7 +129,7 @@ cdef class DBInterval(Interval):
         s += ", " + repr(self.db_startpos) + ", " + repr(self.db_endpos)
         return self.__class__.__name__ + "(" + s + ")"

-    cpdef subset(self, double start, double end):
+    cpdef subset(self, timestamp_t start, timestamp_t end):
         """
         Return a new DBInterval that is a subset of this one
         """

@@ -264,21 +273,15 @@ cdef class IntervalSet:

     def __and__(self, other not None):
         """
-        Compute a new IntervalSet from the intersection of two others
+        Compute a new IntervalSet from the intersection of this
+        IntervalSet with one other interval.

         Output intervals are built as subsets of the intervals in the
         first argument (self).
         """
         out = IntervalSet()
-        if not isinstance(other, IntervalSet):
-            for i in self.intersection(other):
-                out.tree.insert(rbtree.RBNode(i.start, i.end, i))
-        else:
-            for x in other:
-                for i in self.intersection(x):
-                    out.tree.insert(rbtree.RBNode(i.start, i.end, i))
+        for i in self.intersection(other):
+            out.tree.insert(rbtree.RBNode(i.start, i.end, i))

         return out

     def intersection(self, Interval interval not None, orig = False):

@@ -295,23 +298,18 @@ cdef class IntervalSet:
         (potentially) subsetted to make the one that is being
         returned.
         """
-        if not isinstance(interval, Interval):
-            raise TypeError("bad type")
-        for n in self.tree.intersect(interval.start, interval.end):
-            i = n.obj
-            if i:
-                if i.start >= interval.start and i.end <= interval.end:
-                    if orig:
-                        yield (i, i)
-                    else:
-                        yield i
-                else:
-                    subset = i.subset(max(i.start, interval.start),
-                                      min(i.end, interval.end))
-                    if orig:
-                        yield (subset, i)
-                    else:
-                        yield subset
+        if orig:
+            for n in self.tree.intersect(interval.start, interval.end):
+                i = n.obj
+                subset = i.subset(max(i.start, interval.start),
+                                  min(i.end, interval.end))
+                yield (subset, i)
+        else:
+            for n in self.tree.intersect(interval.start, interval.end):
+                i = n.obj
+                subset = i.subset(max(i.start, interval.start),
+                                  min(i.end, interval.end))
+                yield subset

     cpdef intersects(self, Interval other):
         """Return True if this IntervalSet intersects another interval"""

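Both branches of the rewritten generator clip each stored interval against the request with `subset(max(starts), min(ends))`; hoisting the `orig` test out of the loop removes a per-row branch. A toy illustration of the clipping rule for half-open intervals (plain Python, independent of the rb-tree):

```python
def clip(i_start, i_end, q_start, q_end):
    """Clip a stored half-open interval [i_start, i_end) against a
    query range [q_start, q_end); returns None if they don't overlap."""
    start = max(i_start, q_start)
    end = min(i_end, q_end)
    if start >= end:        # zero-width results are disallowed
        return None
    return (start, end)

# Stored intervals intersected with a query of [15, 35)
stored = [(0, 10), (10, 20), (30, 40)]
print([clip(s, e, 15, 35) for (s, e) in stored])
# [None, (15, 20), (30, 35)]
```
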
@@ -320,7 +318,7 @@ cdef class IntervalSet:
                 return True
         return False

-    def find_end(self, double t):
+    def find_end(self, timestamp_t t):
         """
         Return an Interval from this tree that ends at time t, or
         None if it doesn't exist.

@@ -1,201 +0,0 @@
-# cython: profile=False
-
-import time
-import sys
-import inspect
-import cStringIO
-
-cdef enum:
-    max_value_count = 64
-
-cimport cython
-cimport libc.stdlib
-cimport libc.stdio
-cimport libc.string
-
-class ParserError(Exception):
-    def __init__(self, line, message):
-        self.message = "line " + str(line) + ": " + message
-        Exception.__init__(self, self.message)
-
-class FormatterError(Exception):
-    pass
-
-class Layout:
-    """Represents a NILM database layout"""
-
-    def __init__(self, typestring):
-        """Initialize this Layout object to handle the specified
-        type string"""
-        try:
-            [ datatype, count ] = typestring.split("_")
-        except:
-            raise KeyError("invalid layout string")
-
-        try:
-            self.count = int(count)
-        except ValueError:
-            raise KeyError("invalid count")
-        if self.count < 1 or self.count > max_value_count:
-            raise KeyError("invalid count")
-
-        if datatype == 'uint16':
-            self.parse = self.parse_uint16
-            self.format_str = "%.6f" + " %d" * self.count
-            self.format = self.format_generic
-        elif datatype == 'float32':
-            self.parse = self.parse_float64
-            self.format_str = "%.6f" + " %.6e" * self.count
-            self.format = self.format_generic
-        elif datatype == 'float64':
-            self.parse = self.parse_float64
-            self.format_str = "%.6f" + " %.16e" * self.count
-            self.format = self.format_generic
-        else:
-            raise KeyError("invalid type")
-
-        self.datatype = datatype
-
-    # Parsers
-    def parse_float64(self, char *text):
-        cdef int n
-        cdef double ts
-        # Return doubles even in float32 case, since they're going into
-        # a Python array which would upconvert to double anyway.
-        result = [0] * (self.count + 1)
-        cdef char *end
-        ts = libc.stdlib.strtod(text, &end)
-        if end == text:
-            raise ValueError("bad timestamp")
-        result[0] = ts
-        for n in range(self.count):
-            text = end
-            result[n+1] = libc.stdlib.strtod(text, &end)
-            if end == text:
-                raise ValueError("wrong number of values")
-        n = 0
-        while end[n] == ' ':
-            n += 1
-        if end[n] != '\n' and end[n] != '#' and end[n] != '\0':
-            raise ValueError("extra data on line")
-        return (ts, result)
-
-    def parse_uint16(self, char *text):
-        cdef int n
-        cdef double ts
-        cdef int v
-        cdef char *end
-        result = [0] * (self.count + 1)
-        ts = libc.stdlib.strtod(text, &end)
-        if end == text:
-            raise ValueError("bad timestamp")
-        result[0] = ts
-        for n in range(self.count):
-            text = end
-            v = libc.stdlib.strtol(text, &end, 10)
-            if v < 0 or v > 65535:
-                raise ValueError("value out of range")
-            result[n+1] = v
-            if end == text:
-                raise ValueError("wrong number of values")
-        n = 0
-        while end[n] == ' ':
-            n += 1
-        if end[n] != '\n' and end[n] != '#' and end[n] != '\0':
-            raise ValueError("extra data on line")
-        return (ts, result)
-
-    # Formatters
-    def format_generic(self, d):
-        n = len(d) - 1
-        if n != self.count:
-            raise ValueError("wrong number of values for layout type: "
-                             "got %d, wanted %d" % (n, self.count))
-        return (self.format_str % tuple(d)) + "\n"
-
-# Get a layout by name
-def get_named(typestring):
-    try:
-        return Layout(typestring)
-    except KeyError:
-        compat = { "PrepData": "float32_8",
-                   "RawData": "uint16_6",
-                   "RawNotchedData": "uint16_9" }
-        return Layout(compat[typestring])
-
-class Parser(object):
-    """Object that parses and stores ASCII data for inclusion into the
-    database"""
-
-    def __init__(self, layout):
-        if issubclass(layout.__class__, Layout):
-            self.layout = layout
-        else:
-            try:
-                self.layout = get_named(layout)
-            except KeyError:
-                raise TypeError("unknown layout")
-
-        self.data = []
-        self.min_timestamp = None
-        self.max_timestamp = None
-
-    def parse(self, textdata):
-        """
-        Parse the data, provided as lines of text, using the current
-        layout, into an internal data structure suitable for a
-        pytables 'table.append(parser.data)'.
-        """
-        cdef double last_ts = -1e12, ts
-        cdef int n = 0, i
-        cdef char *line
-
-        indata = cStringIO.StringIO(textdata)
-        # Assume any parsing error is a real error.
-        # In the future we might want to skip completely empty lines,
-        # or partial lines right before EOF?
-        try:
-            self.data = []
-            for pyline in indata:
-                line = pyline
-                n += 1
-                if line[0] == '\#':
-                    continue
-                (ts, row) = self.layout.parse(line)
-                if ts <= last_ts:
-                    raise ValueError("timestamp is not "
-                                     "monotonically increasing")
-                last_ts = ts
-                self.data.append(row)
-        except (ValueError, IndexError, TypeError) as e:
-            raise ParserError(n, "error: " + e.message)
-
-        # Mark timestamp ranges
-        if len(self.data):
-            self.min_timestamp = self.data[0][0]
-            self.max_timestamp = self.data[-1][0]
-
-class Formatter(object):
-    """Object that formats database data into ASCII"""
-
-    def __init__(self, layout):
-        if issubclass(layout.__class__, Layout):
-            self.layout = layout
-        else:
-            try:
-                self.layout = get_named(layout)
-            except KeyError:
-                raise TypeError("unknown layout")
-
-    def format(self, data):
-        """
-        Format raw data from the database, using the current layout,
-        as lines of ACSII text.
-        """
-        text = cStringIO.StringIO()
-        try:
-            for row in data:
-                text.write(self.layout.format(row))
-        except (ValueError, IndexError, TypeError) as e:
-            raise FormatterError("formatting error: " + e.message)
-        return text.getvalue()

@@ -7,20 +7,19 @@ Object that represents a NILM database file.
 Manages both the SQL database and the table storage backend.
 """

-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the parent nilmdb module instead.
-from __future__ import absolute_import
-import nilmdb.utils
-from nilmdb.utils.printf import *
-from nilmdb.server.interval import (Interval, DBInterval,
-                                    IntervalSet, IntervalError)
-from nilmdb.server import bulkdata
-from nilmdb.server.errors import NilmDBError, StreamError, OverlapError

-import sqlite3
 import os
 import errno
-import bisect
+import sqlite3
+
+import nilmdb.utils
+from nilmdb.utils.printf import printf
+from nilmdb.utils.time import timestamp_to_bytes
+
+from nilmdb.utils.interval import IntervalError
+from nilmdb.server.interval import Interval, DBInterval, IntervalSet
+
+from nilmdb.server import bulkdata
+from nilmdb.server.errors import NilmDBError, StreamError, OverlapError

 # Note about performance and transactions:
 #

@@ -35,10 +34,10 @@ import bisect
 # seems that 'PRAGMA synchronous=NORMAL' and 'PRAGMA journal_mode=WAL'
 # give an equivalent speedup more safely.  That is what is used here.
 _sql_schema_updates = {
-    0: """
+    0: {"next": 1, "sql": """
         -- All streams
         CREATE TABLE streams(
             id INTEGER PRIMARY KEY,       -- stream ID
             path TEXT UNIQUE NOT NULL,    -- path, e.g. '/newton/prep'
             layout TEXT NOT NULL          -- layout name, e.g. float32_8
         );

@@ -59,24 +58,47 @@ _sql_schema_updates = {
             end_pos INTEGER NOT NULL
         );
         CREATE INDEX _ranges_index ON ranges (stream_id, start_time, end_time);
-        """,
+        """},

-    1: """
+    1: {"next": 3, "sql": """
         -- Generic dictionary-type metadata that can be associated with a stream
         CREATE TABLE metadata(
             stream_id INTEGER NOT NULL,
             key TEXT NOT NULL,
             value TEXT
         );
-        """,
+        """},
+
+    2: {"error": "old format with floating-point timestamps requires "
+                 "nilmdb 1.3.1 or older"},
+
+    3: {"next": None},
 }


 @nilmdb.utils.must_close()
-class NilmDB(object):
+class NilmDB():
     verbose = 0

-    def __init__(self, basepath, max_results=None,
+    def __init__(self, basepath,
+                 max_results=None,
+                 max_removals=None,
+                 max_int_removals=None,
                  bulkdata_args=None):
+        """Initialize NilmDB at the given basepath.
+        Other arguments are for debugging / testing:
+
+        'max_results' is the max rows to send in a single
+        stream_intervals or stream_extract response.
+
+        'max_removals' is the max rows to delete at once
+        in stream_remove.
+
+        'max_int_removals' is the max intervals to delete
+        at once in stream_remove.
+
+        'bulkdata_args' is kwargs for the bulkdata module.
+        """
         if bulkdata_args is None:
             bulkdata_args = {}

@@ -95,19 +117,26 @@ class NilmDB(object):

         # SQLite database too
         sqlfilename = os.path.join(self.basepath, "data.sql")
-        self.con = sqlite3.connect(sqlfilename, check_same_thread = True)
-        self._sql_schema_update()
+        self.con = sqlite3.connect(sqlfilename, check_same_thread=True)
+        try:
+            self._sql_schema_update()
+        except Exception:
+            self.data.close()
+            raise

         # See big comment at top about the performance implications of this
         self.con.execute("PRAGMA synchronous=NORMAL")
         self.con.execute("PRAGMA journal_mode=WAL")

         # Approximate largest number of elements that we want to send
-        # in a single reply (for stream_intervals, stream_extract)
-        if max_results:
-            self.max_results = max_results
-        else:
-            self.max_results = 16384
+        # in a single reply (for stream_intervals, stream_extract).
+        self.max_results = max_results or 16384
+
+        # Remove up to this many rows per call to stream_remove.
+        self.max_removals = max_removals or 1048576
+
+        # Remove up to this many intervals per call to stream_remove.
+        self.max_int_removals = max_int_removals or 4096

     def get_basepath(self):
         return self.basepath

@@ -116,6 +145,7 @@ class NilmDB(object):
         if self.con:
             self.con.commit()
             self.con.close()
+            self.con = None
         self.data.close()

     def _sql_schema_update(self):

@@ -123,11 +153,20 @@ class NilmDB(object):
         version = cur.execute("PRAGMA user_version").fetchone()[0]
         oldversion = version

-        while version in _sql_schema_updates:
-            cur.executescript(_sql_schema_updates[version])
-            version = version + 1
-            if self.verbose: # pragma: no cover
-                printf("Schema updated to %d\n", version)
+        while True:
+            if version not in _sql_schema_updates:
+                raise Exception(self.basepath + ": unknown database version "
+                                + str(version))
+            update = _sql_schema_updates[version]
+            if "error" in update:
+                raise Exception(self.basepath + ": can't use database version "
+                                + str(version) + ": " + update["error"])
+            if update["next"] is None:
+                break
+            cur.executescript(update["sql"])
+            version = update["next"]
+            if self.verbose:
+                printf("Database schema updated to %d\n", version)

         if version != oldversion:
             with self.con:

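The schema table is now an explicit migration graph: each version names its successor in `"next"`, the terminal version uses `None`, and a version that cannot be upgraded in place carries an `"error"` entry (here, version 2's floating-point timestamp format). A minimal standalone sketch of the same walk against an in-memory SQLite database (the table contents are invented for illustration):

```python
import sqlite3

# Hypothetical migration table in the same shape as _sql_schema_updates
updates = {
    0: {"next": 1, "sql": "CREATE TABLE streams (id INTEGER PRIMARY KEY);"},
    1: {"next": None},
}

con = sqlite3.connect(":memory:")
cur = con.cursor()
version = cur.execute("PRAGMA user_version").fetchone()[0]
while True:
    if version not in updates:
        raise Exception("unknown database version " + str(version))
    update = updates[version]
    if "error" in update:
        raise Exception("can't use database version %d: %s"
                        % (version, update["error"]))
    if update["next"] is None:
        break
    cur.executescript(update["sql"])
    version = update["next"]
# PRAGMA doesn't accept bound parameters, hence the string formatting
cur.execute("PRAGMA user_version = %d" % version)
print("schema now at version", version)   # schema now at version 1
```

The `"next"` pointer also lets version numbers skip, as the real table does from 1 directly to 3, stepping over the unsupported version 2.
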
@@ -135,14 +174,14 @@ class NilmDB(object):

     def _check_user_times(self, start, end):
         if start is None:
-            start = -1e12
+            start = nilmdb.utils.time.min_timestamp
         if end is None:
-            end = 1e12
+            end = nilmdb.utils.time.max_timestamp
         if start >= end:
             raise NilmDBError("start must precede end")
         return (start, end)

-    @nilmdb.utils.lru_cache(size = 16)
+    @nilmdb.utils.lru_cache(size=64)
     def _get_intervals(self, stream_id):
         """
         Return a mutable IntervalSet corresponding to the given stream ID.

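Open-ended queries previously defaulted to ±1e12 floating-point seconds; they now use the library's integer bounds from `nilmdb.utils.time`. A sketch of the clamping behavior with stand-in constants (the real `min_timestamp`/`max_timestamp` values for the int64 format are an assumption here):

```python
# Stand-in bounds for illustration only; the actual constants are
# defined by nilmdb.utils.time for the int64 timestamp format.
MIN_TIMESTAMP = -(2 ** 62)
MAX_TIMESTAMP = 2 ** 62

def check_user_times(start, end):
    """Fill in open-ended bounds and validate ordering."""
    if start is None:
        start = MIN_TIMESTAMP
    if end is None:
        end = MAX_TIMESTAMP
    if start >= end:
        raise ValueError("start must precede end")
    return (start, end)

print(check_user_times(None, 1000))   # (-4611686018427387904, 1000)
```
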
@@ -157,7 +196,7 @@ class NilmDB(object):
                 iset += DBInterval(start_time, end_time,
                                    start_time, end_time,
                                    start_pos, end_pos)
-        except IntervalError: # pragma: no cover
+        except IntervalError:
             raise NilmDBError("unexpected overlap in ranges table!")

         return iset

@@ -184,21 +223,17 @@ class NilmDB(object):
         # Load this stream's intervals
         iset = self._get_intervals(stream_id)

-        # Check for overlap
-        if iset.intersects(interval): # pragma: no cover (gets caught earlier)
-            raise NilmDBError("new interval overlaps existing data")
-
         # Check for adjacency.  If there's a stream in the database
         # that ends exactly when this one starts, and the database
         # rows match up, we can make one interval that covers the
         # time range [adjacent.start -> interval.end)
         # and database rows [ adjacent.start_pos -> end_pos ].
         # Only do this if the resulting interval isn't too large.
         max_merged_rows = 8000 * 60 * 60 * 1.05  # 1.05 hours at 8 KHz
         adjacent = iset.find_end(interval.start)
         if (adjacent is not None and
             start_pos == adjacent.db_endpos and
             (end_pos - adjacent.db_startpos) < max_merged_rows):
             # First delete the old one, both from our iset and the
             # database
             iset -= adjacent

@@ -230,10 +265,6 @@ class NilmDB(object):
         original: original DBInterval; must be already present in DB
         to_remove: DBInterval to remove; must be subset of 'original'
         """
-        # Just return if we have nothing to remove
-        if remove.start == remove.end: # pragma: no cover
-            return
-
         # Load this stream's intervals
         iset = self._get_intervals(stream_id)

@@ -248,7 +279,8 @@ class NilmDB(object):
         # the removed piece was in the middle.
         def add(iset, start, end, start_pos, end_pos):
             iset += DBInterval(start, end, start, end, start_pos, end_pos)
-            self._sql_interval_insert(stream_id, start, end, start_pos, end_pos)
+            self._sql_interval_insert(stream_id, start, end,
+                                      start_pos, end_pos)

         if original.start != remove.start:
             # Interval before the removed region

@@ -265,7 +297,7 @@ class NilmDB(object):

         return

-    def stream_list(self, path = None, layout = None, extended = False):
+    def stream_list(self, path=None, layout=None, extended=False):
         """Return list of lists of all streams in the database.

         If path is specified, include only streams with a path that

@@ -274,24 +306,24 @@ class NilmDB(object):
         If layout is specified, include only streams with a layout
         that matches the given string.

-        If extended = False, returns a list of lists containing
+        If extended=False, returns a list of lists containing
         the path and layout: [ path, layout ]

-        If extended = True, returns a list of lists containing
+        If extended=True, returns a list of lists containing
         more information:
           path
           layout
           interval_min (earliest interval start)
           interval_max (latest interval end)
           rows (total number of rows of data)
-          seconds (total time covered by this stream)
+          time (total time covered by this stream, in timestamp units)
         """
         params = ()
         query = "SELECT streams.path, streams.layout"
         if extended:
             query += ", min(ranges.start_time), max(ranges.end_time) "
-            query += ", sum(ranges.end_pos - ranges.start_pos) "
-            query += ", sum(ranges.end_time - ranges.start_time) "
+            query += ", coalesce(sum(ranges.end_pos - ranges.start_pos), 0) "
+            query += ", coalesce(sum(ranges.end_time - ranges.start_time), 0) "
         query += " FROM streams"
         if extended:
             query += " LEFT JOIN ranges ON streams.id = ranges.stream_id"

@@ -304,33 +336,47 @@ class NilmDB(object):
             params += (path,)
         query += " GROUP BY streams.id ORDER BY streams.path"
         result = self.con.execute(query, params).fetchall()
-        return [ list(x) for x in result ]
+        return [list(x) for x in result]

-    def stream_intervals(self, path, start = None, end = None):
+    def stream_intervals(self, path, start=None, end=None, diffpath=None):
         """
+        List all intervals in 'path' between 'start' and 'end'.  If
+        'diffpath' is not none, list instead the set-difference
+        between the intervals in the two streams; i.e. all interval
+        ranges that are present in 'path' but not 'diffpath'.
+
         Returns (intervals, restart) tuple.

-        intervals is a list of [start,end] timestamps of all intervals
+        'intervals' is a list of [start,end] timestamps of all intervals
         that exist for path, between start and end.

-        restart, if nonzero, means that there were too many results to
-        return in a single request.  The data is complete from the
-        starting timestamp to the point at which it was truncated,
-        and a new request with a start time of 'restart' will fetch
-        the next block of data.
+        'restart', if not None, means that there were too many results
+        to return in a single request.  The data is complete from the
+        starting timestamp to the point at which it was truncated, and
+        a new request with a start time of 'restart' will fetch the
+        next block of data.
         """
         stream_id = self._stream_id(path)
         intervals = self._get_intervals(stream_id)
+        if diffpath:
+            diffstream_id = self._stream_id(diffpath)
+            diffintervals = self._get_intervals(diffstream_id)
         (start, end) = self._check_user_times(start, end)
         requested = Interval(start, end)
         result = []
-        for n, i in enumerate(intervals.intersection(requested)):
+        if diffpath:
+            getter = nilmdb.utils.interval.set_difference(
+                intervals.intersection(requested),
+                diffintervals.intersection(requested))
+        else:
+            getter = intervals.intersection(requested)
+        for n, i in enumerate(getter):
             if n >= self.max_results:
                 restart = i.start
                 break
             result.append([i.start, i.end])
         else:
-            restart = 0
+            restart = None
         return (result, restart)

     def stream_create(self, path, layout_name):

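With `restart` now `None` (rather than 0) as the "complete" sentinel, a caller can page through a long interval list without the old ambiguity around a legitimate restart timestamp of zero. A hypothetical client-side loop against this API, with a tiny stub standing in for the real database:

```python
class FakeDB:
    """Tiny stand-in for NilmDB that truncates replies at two results."""
    def __init__(self, intervals, max_results=2):
        self.intervals = intervals
        self.max_results = max_results

    def stream_intervals(self, path, start=None, end=None):
        start = start if start is not None else float("-inf")
        pending = [i for i in self.intervals if i[0] >= start]
        if len(pending) > self.max_results:
            # Truncate, and report where the next request should begin
            return (pending[:self.max_results], pending[self.max_results][0])
        return (pending, None)

def all_intervals(db, path, start=None, end=None):
    """Follow 'restart' markers until the reply is complete."""
    while True:
        (intervals, restart) = db.stream_intervals(path, start, end)
        yield from intervals
        if restart is None:          # no truncation; we're done
            break
        start = restart              # resume where the reply was cut off

db = FakeDB([[0, 10], [10, 20], [30, 40], [50, 60], [70, 80]])
print(list(all_intervals(db, "/demo/stream")))
```
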
@@ -364,8 +410,8 @@ class NilmDB(object):

     def stream_set_metadata(self, path, data):
         """Set stream metadata from a dictionary, e.g.
-           { description = 'Downstairs lighting',
-             v_scaling = 123.45 }
+           { description: 'Downstairs lighting',
+             v_scaling: 123.45 }
           This replaces all existing metadata.
        """
        stream_id = self._stream_id(path)

@@ -393,29 +439,50 @@ class NilmDB(object):
         data.update(newdata)
         self.stream_set_metadata(path, data)

+    def stream_rename(self, oldpath, newpath):
+        """Rename a stream."""
+        stream_id = self._stream_id(oldpath)
+
+        # Rename the data
+        self.data.rename(oldpath, newpath)
+
+        # Rename the stream in the database
+        with self.con as con:
+            con.execute("UPDATE streams SET path=? WHERE id=?",
+                        (newpath, stream_id))
+
     def stream_destroy(self, path):
-        """Fully remove a table and all of its data from the database.
-        No way to undo it!  Metadata is removed."""
+        """Fully remove a table from the database.  Fails if there are
+        any intervals data present; remove them first.  Metadata is
+        also removed."""
         stream_id = self._stream_id(path)

-        # Delete the cached interval data (if it was cached)
+        # Verify that no intervals are present, and clear the cache
+        iset = self._get_intervals(stream_id)
+        if iset:
+            raise NilmDBError("all intervals must be removed before "
+                              "destroying a stream")
         self._get_intervals.cache_remove(self, stream_id)

-        # Delete the data
+        # Delete the bulkdata storage
         self.data.destroy(path)

-        # Delete metadata, stream, intervals
+        # Delete metadata, stream, intervals (should be none)
         with self.con as con:
             con.execute("DELETE FROM metadata WHERE stream_id=?", (stream_id,))
             con.execute("DELETE FROM ranges WHERE stream_id=?", (stream_id,))
             con.execute("DELETE FROM streams WHERE id=?", (stream_id,))

-    def stream_insert(self, path, start, end, data):
+    def stream_insert(self, path, start, end, data, binary=False):
         """Insert new data into the database.
         path: Path at which to add the data
         start: Starting timestamp
         end: Ending timestamp
         data: Textual data, formatted according to the layout of path
+
+        'binary', if True, means that 'data' is raw binary:
+        little-endian, matching the current table's layout,
+        including the int64 timestamp.
         """
         # First check for basic overlap using timestamp info given.
         stream_id = self._stream_id(path)

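Since `stream_destroy` now refuses to drop a stream that still holds data, teardown becomes an explicit remove-then-destroy sequence, looping on `restart` because `stream_remove` is bounded per call. A hypothetical caller sketch, assuming a NilmDB instance with the methods shown in this diff:

```python
def destroy_stream(db, path):
    """Remove all data from 'path', then destroy the stream.

    'db' is assumed to be a NilmDB object exposing the stream_remove()
    and stream_destroy() methods from this diff.
    """
    start = None
    while True:
        # Each call removes at most max_removals rows and
        # max_int_removals intervals; keep going until restart is None.
        (removed, restart) = db.stream_remove(path, start, None)
        if restart is None:
            break
        start = restart
    db.stream_destroy(path)
```
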
@@ -429,7 +496,7 @@ class NilmDB(object):
         # there are any parse errors.
         table = self.data.getnode(path)
         row_start = table.nrows
-        table.append_string(data, start, end)
+        table.append_data(data, start, end, binary)
         row_end = table.nrows

         # Insert the record into the sql database.

@@ -438,6 +505,17 @@ class NilmDB(object):
         # And that's all
         return

+    def _bisect_left(self, a, x, lo, hi):
+        # Like bisect.bisect_left, but doesn't choke on large indices on
+        # 32-bit systems, like bisect's fast C implementation does.
+        while lo < hi:
+            mid = (lo + hi) // 2
+            if a[mid] < x:
+                lo = mid + 1
+            else:
+                hi = mid
+        return lo
+
     def _find_start(self, table, dbinterval):
         """
         Given a DBInterval, find the row in the database that

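`_bisect_left` re-implements the pure-Python binary search because, as the comment notes, the C-accelerated `bisect` module can reject indices beyond what a 32-bit platform handles; the table object only needs a `__getitem__` that returns a timestamp. A quick check that the logic matches the standard library on small inputs:

```python
import bisect

def bisect_left(a, x, lo, hi):
    """Pure-Python bisect, same logic as _bisect_left above."""
    while lo < hi:
        mid = (lo + hi) // 2
        if a[mid] < x:
            lo = mid + 1
        else:
            hi = mid
    return lo

timestamps = [0, 10, 20, 20, 30, 40]
for x in (-5, 0, 15, 20, 45):
    assert bisect_left(timestamps, x, 0, len(timestamps)) == \
        bisect.bisect_left(timestamps, x, 0, len(timestamps))
print("matches bisect.bisect_left")
```
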
@@ -448,10 +526,10 @@ class NilmDB(object):
         # Optimization for the common case where an interval wasn't truncated
         if dbinterval.start == dbinterval.db_start:
             return dbinterval.db_startpos
-        return bisect.bisect_left(bulkdata.TimestampOnlyTable(table),
-                                  dbinterval.start,
-                                  dbinterval.db_startpos,
-                                  dbinterval.db_endpos)
+        return self._bisect_left(table,
+                                 dbinterval.start,
+                                 dbinterval.db_startpos,
+                                 dbinterval.db_endpos)

     def _find_end(self, table, dbinterval):
         """

@@ -467,28 +545,36 @@ class NilmDB(object):
         # want to include the given timestamp in the results.  This is
         # so a queries like 1:00 -> 2:00 and 2:00 -> 3:00 return
         # non-overlapping data.
-        return bisect.bisect_left(bulkdata.TimestampOnlyTable(table),
-                                  dbinterval.end,
-                                  dbinterval.db_startpos,
-                                  dbinterval.db_endpos)
+        return self._bisect_left(table,
+                                 dbinterval.end,
+                                 dbinterval.db_startpos,
+                                 dbinterval.db_endpos)

-    def stream_extract(self, path, start = None, end = None, count = False):
+    def stream_extract(self, path, start=None, end=None,
+                       count=False, markup=False, binary=False):
         """
         Returns (data, restart) tuple.

-        data is ASCII-formatted data from the database, formatted
+        'data' is ASCII-formatted data from the database, formatted
         according to the layout of the stream.

-        restart, if nonzero, means that there were too many results to
+        'restart', if not None, means that there were too many results to
         return in a single request.  The data is complete from the
         starting timestamp to the point at which it was truncated,
         and a new request with a start time of 'restart' will fetch
         the next block of data.

-        count, if true, means to not return raw data, but just the count
+        'count', if true, means to not return raw data, but just the count
         of rows that would have been returned.  This is much faster
         than actually fetching the data.  It is not limited by
         max_results.
+
+        'markup', if true, indicates that returned data should be
+        marked with a comment denoting when a particular interval
+        starts, and another comment when an interval ends.
+
+        'binary', if true, means to return raw binary rather than
+        ASCII-formatted data.
         """
         stream_id = self._stream_id(path)
         table = self.data.getnode(path)

@@ -498,7 +584,9 @@ class NilmDB(object):
         result = []
         matched = 0
         remaining = self.max_results
-        restart = 0
+        restart = None
+        if binary and (markup or count):
+            raise NilmDBError("binary mode can't be used with markup or count")
         for interval in intervals.intersection(requested):
             # Reading single rows from the table is too slow, so
             # we use two bisections to find both the starting and

@@ -515,27 +603,48 @@ class NilmDB(object):
             row_max = row_start + remaining
             if row_max < row_end:
                 row_end = row_max
-                restart = table[row_max][0]
+                restart = table[row_max]
+
+            # Add markup
+            if markup:
+                result.append(b"# interval-start " +
+                              timestamp_to_bytes(interval.start) + b"\n")

             # Gather these results up
-            result.append(table.get_as_text(row_start, row_end))
+            result.append(table.get_data(row_start, row_end, binary))

             # Count them
             remaining -= row_end - row_start

-            if restart:
+            # Add markup, and exit if restart is set.
+            if restart is not None:
+                if markup:
+                    result.append(b"# interval-end " +
+                                  timestamp_to_bytes(restart) + b"\n")
                 break
+            if markup:
+                result.append(b"# interval-end " +
+                              timestamp_to_bytes(interval.end) + b"\n")

         if count:
             return matched
-        return ("".join(result), restart)
+        full_result = b"".join(result)
+        return (full_result, restart)

-    def stream_remove(self, path, start = None, end = None):
+    def stream_remove(self, path, start=None, end=None):
         """
         Remove data from the specified time interval within a stream.
-        Removes all data in the interval [start, end), and intervals
-        are truncated or split appropriately.  Returns the number of
-        data points removed.
+
+        Removes data in the interval [start, end), and intervals are
+        truncated or split appropriately.
+
+        Returns a (removed, restart) tuple.
+
+        'removed' is the number of data points that were removed.
+
+        'restart', if not None, means there were too many rows to
+        remove in a single request.  This function should be called
+        again with a start time of 'restart' to complete the removal.
         """
         stream_id = self._stream_id(path)
         table = self.data.getnode(path)

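With `markup=True`, extracted ASCII data is framed by comment lines so a client can recover interval boundaries from the byte stream. The comment prefixes below are taken from the code above; the data lines and the decimal rendering of `timestamp_to_bytes` are invented for illustration. A sketch of parsing such a reply:

```python
# Hypothetical two-interval reply, framed as in stream_extract(markup=True)
reply = (b"# interval-start 1000\n"
         b"1000 1.0\n"
         b"1500 2.0\n"
         b"# interval-end 2000\n"
         b"# interval-start 5000\n"
         b"5000 3.0\n"
         b"# interval-end 6000\n")

intervals = []        # (start, end, rows) tuples
rows = []
start = None
for line in reply.splitlines():
    if line.startswith(b"# interval-start "):
        start = int(line.split()[-1])
        rows = []
    elif line.startswith(b"# interval-end "):
        intervals.append((start, int(line.split()[-1]), rows))
    else:
        rows.append(line)
print(intervals)
```
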
@@ -543,16 +652,34 @@ class NilmDB(object):
         (start, end) = self._check_user_times(start, end)
         to_remove = Interval(start, end)
         removed = 0
+        remaining = self.max_removals
+        int_remaining = self.max_int_removals
+        restart = None

         # Can't remove intervals from within the iterator, so we need to
         # remember what's currently in the intersection now.
-        all_candidates = list(intervals.intersection(to_remove, orig = True))
+        all_candidates = list(intervals.intersection(to_remove, orig=True))
+
+        remove_start = None
+        remove_end = None

         for (dbint, orig) in all_candidates:
+            # Stop if we've hit the max number of interval removals
+            if int_remaining <= 0:
+                restart = dbint.start
+                break
+
             # Find row start and end
             row_start = self._find_start(table, dbint)
             row_end = self._find_end(table, dbint)

+            # Shorten it if we'll hit the maximum number of removals
+            row_max = row_start + remaining
+            if row_max < row_end:
+                row_end = row_max
+                dbint.end = table[row_max]
+                restart = dbint.end
+
             # Adjust the DBInterval to match the newly found ends
             dbint.db_start = dbint.start
             dbint.db_end = dbint.end

@@ -562,10 +689,29 @@ class NilmDB(object):
             # Remove interval from the database
             self._remove_interval(stream_id, orig, dbint)

-            # Remove data from the underlying table storage
-            table.remove(row_start, row_end)
+            # Remove data from the underlying table storage,
+            # coalescing adjacent removals to reduce the number of calls
+            # to table.remove.
+            if remove_end == row_start:
+                # Extend our coalesced region
+                remove_end = row_end
+            else:
+                # Perform previous removal, then save this one
+                if remove_end is not None:
+                    table.remove(remove_start, remove_end)
+                remove_start = row_start
+                remove_end = row_end

             # Count how many were removed
             removed += row_end - row_start
+            remaining -= row_end - row_start
+            int_remaining -= 1

-        return removed
+            if restart is not None:
+                break
+
+        # Perform any final coalesced removal
+        if remove_end is not None:
+            table.remove(remove_start, remove_end)
+
+        return (removed, restart)

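Adjacent row ranges are now merged before hitting `table.remove()`, which matters when a removal spans many small intervals backed by contiguous rows. The bookkeeping can be isolated as a tiny helper; a sketch mirroring the `remove_start`/`remove_end` logic above:

```python
def coalesce(ranges):
    """Merge adjacent (start, end) row ranges, preserving order."""
    out = []
    remove_start = remove_end = None
    for (row_start, row_end) in ranges:
        if remove_end == row_start:
            remove_end = row_end          # extend the pending region
        else:
            if remove_end is not None:    # flush the previous region
                out.append((remove_start, remove_end))
            remove_start, remove_end = row_start, row_end
    if remove_end is not None:            # final pending region
        out.append((remove_start, remove_end))
    return out

# Three intervals whose rows happen to be contiguous collapse to one call
print(coalesce([(0, 100), (100, 250), (250, 300), (500, 600)]))
# [(0, 300), (500, 600)]
```
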
@@ -1,143 +0,0 @@
-# Python implementation of the "rocket" data parsing interface.
-# This interface translates between the binary format on disk
-# and the ASCII format used when communicating with clients.
-
-# This is slow!  Use the C version instead.
-
-from __future__ import absolute_import
-import struct
-import cStringIO
-import itertools
-from . import layout as _layout
-import nilmdb.utils
-from nilmdb.utils.time import float_time_to_string as ftts
-
-ERR_UNKNOWN = 0
-ERR_NON_MONOTONIC = 1
-ERR_OUT_OF_INTERVAL = 2
-class ParseError(Exception):
-    pass
-
-@nilmdb.utils.must_close(wrap_verify = False)
-class Rocket(object):
-    def __init__(self, layout, filename):
-        self.layout = layout
-        if filename:
-            self.file = open(filename, "a+b")
-        else:
-            self.file = None
-
-        # For packing/unpacking into a binary file.
-        # This will change in the C version
-        try:
-            (self.ltype, lcount) = layout.split('_', 2)
-            self.lcount = int(lcount)
-        except:
-            raise ValueError("no such layout: badly formatted string")
-        if self.lcount < 1:
-            raise ValueError("no such layout: bad count")
-        try:
-            struct_fmt = '<d' # Little endian, double timestamp
-            struct_mapping = {
-                "int8":    'b',
-                "uint8":   'B',
-                "int16":   'h',
-                "uint16":  'H',
-                "int32":   'i',
-                "uint32":  'I',
-                "int64":   'q',
-                "uint64":  'Q',
-                "float32": 'f',
-                "float64": 'd',
-                }
-            struct_fmt += struct_mapping[self.ltype] * self.lcount
-        except KeyError:
-            raise ValueError("no such layout: bad data type")
-        self.packer = struct.Struct(struct_fmt)
-
-        # For packing/unpacking from strings.
-        self.layoutparser = _layout.Layout(self.layout)
-        self.formatter = _layout.Formatter(self.layout)
-
-    def close(self):
-        if self.file:
-            self.file.close()
-
-    @property
-    def binary_size(self):
-        """Return size of one row of data in the binary file, in bytes"""
-        return self.packer.size
-
-    def append_iter(self, maxrows, data):
-        """Append the list data to the file"""
-        # We assume the file is opened in append mode,
-        # so all writes go to the end.
-        written = 0
-        for row in itertools.islice(data, maxrows):
-            self.file.write(self.packer.pack(*row))
-            written += 1
-        self.file.flush()
-        return written
-
-    def append_string(self, count, data, data_offset, linenum,
-                      start, end, last_timestamp):
-        """Parse string and append data.
-
-        count: maximum number of rows to add
-        data: string data
-        data_offset: byte offset into data to start parsing
-        linenum: current line number of data
-        start: starting timestamp for interval
-        end: end timestamp for interval
-        last_timestamp: last timestamp that was previously parsed
-
-        Raises ParseError if timestamps are non-monotonic, outside the
-        start/end interval, etc.
-
-        On success, return a tuple with three values:
-          added_rows: how many rows were added from the file
-          data_offset: current offset into the data string
-          last_timestamp: last timestamp we parsed
-        """
-        # Parse the input data
-        indata = cStringIO.StringIO(data)
-        indata.seek(data_offset)
-        written = 0
-        while written < count:
-            line = indata.readline()
-            linenum += 1
-            if line == "":
-                break
-            comment = line.find('#')
-            if comment >= 0:
-                line = line.split('#', 1)[0]
-            line = line.strip()
-            if line == "":
-                continue
-            try:
-                (ts, row) = self.layoutparser.parse(line)
-            except ValueError as e:
-                raise ParseError(linenum, ERR_UNKNOWN, e)
-            if ts <= last_timestamp:
-                raise ParseError(linenum, ERR_NON_MONOTONIC, ts)
-            last_timestamp = ts
-            if ts < start or ts >= end:
-                raise ParseError(linenum, ERR_OUT_OF_INTERVAL, ts)
-            self.append_iter(1, [row])
-            written += 1
-        return (written, indata.tell(), last_timestamp, linenum)
-
-    def extract_list(self, offset, count):
-        """Extract count rows of data from the file at offset offset.
-        Return a list of lists [[row],[row],...]"""
-        ret = []
-        self.file.seek(offset)
-        for i in xrange(count):
-            data = self.file.read(self.binary_size)
-            ret.append(list(self.packer.unpack(data)))
-        return ret
-
-    def extract_string(self, offset, count):
-        """Extract count rows of data from the file at offset offset.
-        Return an ascii formatted string according to the layout"""
-        return self.formatter.format(self.extract_list(offset, count))

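The deleted pure-Python "rocket" shows the on-disk row format: a little-endian timestamp followed by N values, built as a `struct` format string. Under nilmdb-2.0 the timestamp is int64 rather than double; a sketch of the same construction with that change (the format codes come from the table above, while `'<q'` for the timestamp is an assumption based on the int64 timestamps noted elsewhere in this diff):

```python
import struct

STRUCT_MAPPING = {
    "int8": 'b', "uint8": 'B', "int16": 'h', "uint16": 'H',
    "int32": 'i', "uint32": 'I', "int64": 'q', "uint64": 'Q',
    "float32": 'f', "float64": 'd',
}

def make_packer(layout):
    """Build a struct.Struct for one binary row of the given layout,
    e.g. 'uint16_6'.  '<q' = little-endian int64 timestamp (assumed
    nilmdb-2.0 format)."""
    (ltype, lcount) = layout.split('_', 1)
    return struct.Struct('<q' + STRUCT_MAPPING[ltype] * int(lcount))

p = make_packer("uint16_6")
row = p.pack(1234567890123456, 1, 2, 3, 4, 5, 6)
print(len(row), p.unpack(row))   # 20 (1234567890123456, 1, 2, 3, 4, 5, 6)
```
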
@@ -1,3 +1,5 @@
+# cython: language_level=2
+
 cdef class RBNode:
     cdef public object obj
     cdef public double start, end

@@ -1,5 +1,6 @@
 # cython: profile=False
 # cython: cdivision=True
+# cython: language_level=2

 """
 Jim Paris <jim@jtan.com>

@@ -2,8 +2,12 @@
 #include <structmember.h>
 #include <endian.h>

+#include <ctype.h>
 #include <stdint.h>
+
+#define __STDC_FORMAT_MACROS
+#include <inttypes.h>

 /* Values missing from stdint.h */
 #define UINT8_MIN 0
 #define UINT16_MIN 0

@@ -16,9 +20,11 @@
 #define FLOAT64_MIN 0
 #define FLOAT64_MAX 0

+typedef int64_t timestamp_t;
+
 /* Somewhat arbitrary, just so we can use fixed sizes for strings
    etc. */
-static const int MAX_LAYOUT_COUNT = 64;
+static const int MAX_LAYOUT_COUNT = 1024;

 /* Error object and constants */
 static PyObject *ParseError;

@@ -35,20 +41,20 @@ static void add_parseerror_codes(PyObject *module)
 }

 /* Helpers to raise ParseErrors.  Use "return raise_str(...)" etc. */
-static PyObject *raise_str(int linenum, int code, const char *string)
+static PyObject *raise_str(int line, int col, int code, const char *string)
 {
         PyObject *o;
-        o = Py_BuildValue("(iis)", linenum, code, string);
+        o = Py_BuildValue("(iiis)", line, col, code, string);
         if (o != NULL) {
                 PyErr_SetObject(ParseError, o);
                 Py_DECREF(o);
         }
         return NULL;
 }
-static PyObject *raise_num(int linenum, int code, double num)
+static PyObject *raise_int(int line, int col, int code, int64_t num)
 {
         PyObject *o;
-        o = Py_BuildValue("(iid)", linenum, code, num);
+        o = Py_BuildValue("(iiiL)", line, col, code, (long long)num);
         if (o != NULL) {
                 PyErr_SetObject(ParseError, o);
                 Py_DECREF(o);

@@ -132,7 +138,7 @@ static void Rocket_dealloc(Rocket *self)
                 fclose(self->file);
                 self->file = NULL;
         }
-        self->ob_type->tp_free((PyObject *)self);
+        Py_TYPE(self)->tp_free((PyObject *)self);
 }

 static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)

@@ -154,13 +160,19 @@ static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 static int Rocket_init(Rocket *self, PyObject *args, PyObject *kwds)
 {
         const char *layout, *path;
+        int pathlen;
         static char *kwlist[] = { "layout", "file", NULL };
-        if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz", kwlist,
-                                         &layout, &path))
+        if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz#", kwlist,
+                                         &layout, &path, &pathlen))
                 return -1;
         if (!layout)
                 return -1;
         if (path) {
+                if (strlen(path) != pathlen) {
+                        PyErr_SetString(PyExc_ValueError, "path must not "
+                                        "contain NUL characters");
+                        return -1;
+                }
                 if ((self->file = fopen(path, "a+b")) == NULL) {
                         PyErr_SetFromErrno(PyExc_OSError);
                         return -1;

@@ -233,117 +245,17 @@ static PyObject *Rocket_get_file_size(Rocket *self)
 			return NULL;
 		}
 	}
-	return PyInt_FromLong(self->file_size);
+	return PyLong_FromLong(self->file_size);
-}
-
-/****
- * Append from iterator
- */
-
-/* Helper for writing Python objects to the file */
-static inline void append_pyobject(FILE *out, PyObject *val, layout_type_t type)
-{
-	union8_t t8;
-	union16_t t16;
-	union32_t t32;
-	union64_t t64;
-	int ret = 0;
-
-	switch (type) {
-#define CASE(type, pyconvert, pytype, disktype, htole, bytes) \
-	case LAYOUT_TYPE_##type: \
-		pytype = pyconvert(val); \
-		if (PyErr_Occurred()) \
-			return; \
-		disktype = htole(disktype); \
-		ret = fwrite(&disktype, bytes, 1, out); \
-		break
-	CASE(INT8,    PyInt_AsLong,     t8.i,  t8.u,  , 1);
-	CASE(UINT8,   PyInt_AsLong,     t8.u,  t8.u,  , 1);
-	CASE(INT16,   PyInt_AsLong,     t16.i, t16.u, htole16, 2);
-	CASE(UINT16,  PyInt_AsLong,     t16.u, t16.u, htole16, 2);
-	CASE(INT32,   PyInt_AsLong,     t32.i, t32.u, htole32, 4);
-	CASE(UINT32,  PyInt_AsLong,     t32.u, t32.u, htole32, 4);
-	CASE(INT64,   PyInt_AsLong,     t64.i, t64.u, htole64, 8);
-	CASE(UINT64,  PyInt_AsLong,     t64.u, t64.u, htole64, 8);
-	CASE(FLOAT32, PyFloat_AsDouble, t32.f, t32.u, htole32, 4);
-	CASE(FLOAT64, PyFloat_AsDouble, t64.d, t64.u, htole64, 8);
-#undef CASE
-	default:
-		PyErr_SetString(PyExc_TypeError, "unknown type");
-		return;
-	}
-	if (ret <= 0) {
-		PyErr_SetFromErrno(PyExc_OSError);
-	}
-}
-
-/* .append_iter(maxrows, dataiter) */
-static PyObject *Rocket_append_iter(Rocket *self, PyObject *args)
-{
-	int maxrows;
-	PyObject *iter;
-	PyObject *rowlist;
-	if (!PyArg_ParseTuple(args, "iO:append_iter", &maxrows, &iter))
-		return NULL;
-	if (!PyIter_Check(iter)) {
-		PyErr_SetString(PyExc_TypeError, "need an iterable");
-		return NULL;
-	}
-	if (!self->file) {
-		PyErr_SetString(PyExc_Exception, "no file");
-		return NULL;
-	}
-
-	/* Mark file size so that it will get updated next time it's read */
-	self->file_size = -1;
-
-	int row;
-	for (row = 0; row < maxrows; row++) {
-		rowlist = PyIter_Next(iter);
-		if (!rowlist)
-			break;
-		if (!PyList_Check(rowlist)) {
-			PyErr_SetString(PyExc_TypeError, "rows must be lists");
-			goto row_err;
-		}
-		if (PyList_Size(rowlist) != self->layout_count + 1) {
-			PyErr_SetString(PyExc_TypeError, "short row");
-			goto row_err;
-		}
-
-		/* Extract and write timestamp */
-		append_pyobject(self->file, PyList_GetItem(rowlist, 0),
-				LAYOUT_TYPE_FLOAT64);
-		if (PyErr_Occurred())
-			goto row_err;
-
-		/* Extract and write values */
-		int i;
-		for (i = 0; i < self->layout_count; i++) {
-			append_pyobject(self->file,
-					PyList_GetItem(rowlist, i+1),
-					self->layout_type);
-			if (PyErr_Occurred())
-				goto row_err;
-		}
-	}
-	fflush(self->file);
-	/* All done */
-	return PyLong_FromLong(row);
-row_err:
-	fflush(self->file);
-	Py_DECREF(rowlist);
-	return NULL;
 }

 /****
  * Append from string
  */
-static inline long int strtol10(const char *nptr, char **endptr) {
-	return strtol(nptr, endptr, 10);
+static inline long int strtoll10(const char *nptr, char **endptr) {
+	return strtoll(nptr, endptr, 10);
 }
-static inline long int strtoul10(const char *nptr, char **endptr) {
-	return strtoul(nptr, endptr, 10);
+static inline long int strtoull10(const char *nptr, char **endptr) {
+	return strtoull(nptr, endptr, 10);
 }
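The move from `strtol`/`strtoul` to `strtoll`/`strtoull` matters because timestamps are now 64-bit microsecond counts, and a C `long` is only 32 bits on many platforms. A quick back-of-the-envelope check of the ranges involved:

```python
# Why 32-bit strtol() can't parse microsecond timestamps.
LONG32_MAX = 2**31 - 1
print(LONG32_MAX / 1e6 / 60)                 # ~35.8 minutes of microseconds
print((2**63 - 1) / 1e6 / 86400 / 365.25)    # int64 covers ~292,000 years
```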

 /* .append_string(count, data, offset, linenum, start, end, last_timestamp) */
@@ -352,10 +264,12 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 	int count;
 	const char *data;
 	int offset;
+	const char *linestart;
 	int linenum;
-	double start;
-	double end;
-	double last_timestamp;
+	long long ll1, ll2, ll3;
+	timestamp_t start;
+	timestamp_t end;
+	timestamp_t last_timestamp;

 	int written = 0;
 	char *endptr;
@@ -365,23 +279,32 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 	union64_t t64;
 	int i;

-	/* It would be nice to use 't#' instead of 's' for data,
-	   but we need the null termination for strto*.  If we had
-	   strnto* that took a length, we could use t# and not require
-	   a copy. */
-	if (!PyArg_ParseTuple(args, "isiiddd:append_string", &count,
+	/* Input data is bytes.  Using 'y#' instead of 'y' might be
+	   preferable, but strto* requires the null terminator. */
+	if (!PyArg_ParseTuple(args, "iyiiLLL:append_string", &count,
 			      &data, &offset, &linenum,
-			      &start, &end, &last_timestamp))
+			      &ll1, &ll2, &ll3))
 		return NULL;
+	start = ll1;
+	end = ll2;
+	last_timestamp = ll3;

+	/* Skip spaces, but don't skip over a newline. */
+#define SKIP_BLANK(buf) do {			\
+	while (isspace(*buf)) {			\
+		if (*buf == '\n')		\
+			break;			\
+		buf++;				\
+	} } while(0)

 	const char *buf = &data[offset];
 	while (written < count && *buf)
 	{
+		linestart = buf;
 		linenum++;

 		/* Skip leading whitespace and commented lines */
-		while (*buf == ' ' || *buf == '\t')
-			buf++;
+		SKIP_BLANK(buf);
 		if (*buf == '#') {
 			while (*buf && *buf != '\n')
 				buf++;
@@ -391,14 +314,23 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 		}

 		/* Extract timestamp */
-		t64.d = strtod(buf, &endptr);
-		if (endptr == buf)
-			return raise_str(linenum, ERR_OTHER, "bad timestamp");
-		if (t64.d <= last_timestamp)
-			return raise_num(linenum, ERR_NON_MONOTONIC, t64.d);
-		last_timestamp = t64.d;
-		if (t64.d < start || t64.d >= end)
-			return raise_num(linenum, ERR_OUT_OF_INTERVAL, t64.d);
+		t64.i = strtoll(buf, &endptr, 10);
+		if (endptr == buf || !isspace(*endptr)) {
+			/* Try parsing as a double instead */
+			t64.d = strtod(buf, &endptr);
+			if (endptr == buf)
+				goto bad_timestamp;
+			if (!isspace(*endptr))
+				goto cant_parse_value;
+			t64.i = round(t64.d);
+		}
+		if (t64.i <= last_timestamp)
+			return raise_int(linenum, buf - linestart + 1,
+					 ERR_NON_MONOTONIC, t64.i);
+		last_timestamp = t64.i;
+		if (t64.i < start || t64.i >= end)
+			return raise_int(linenum, buf - linestart + 1,
+					 ERR_OUT_OF_INTERVAL, t64.i);
 		t64.u = le64toh(t64.u);
 		if (fwrite(&t64.u, 8, 1, self->file) != 1)
 			goto err;
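The parser now wants an integer microsecond timestamp first on each line, falling back to `strtod` plus rounding for legacy float input. A minimal sketch of the ASCII format this accepts (the `float32_3` value count is an example; the call shape follows the docstring in the method table below):

```python
# Example input for append_string: integer timestamp, then layout values.
data = (b"# comment lines and blank lines are skipped\n"
        b"1234567890000000 1.0 2.0 3.0\n"
        b"1234567890008333 1.1 2.1 3.1\n")
# Hypothetical call, mirroring the documented signature:
# r.append_string(count, data, offset, linenum, start, end, last_ts)
```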
@@ -410,23 +342,31 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 	case LAYOUT_TYPE_##type: \
 		/* parse and write in a loop */ \
 		for (i = 0; i < self->layout_count; i++) { \
-			parsetype = parsefunc(buf, &endptr); \
-			if (endptr == buf) \
+			/* skip non-newlines */ \
+			SKIP_BLANK(buf); \
+			if (*buf == '\n') \
 				goto wrong_number_of_values; \
+			/* parse number */ \
+			parsetype = parsefunc(buf, &endptr); \
+			if (*endptr && !isspace(*endptr)) \
+				goto cant_parse_value; \
+			/* check limits */ \
 			if (type##_MIN != type##_MAX && \
 			    (parsetype < type##_MIN || \
 			     parsetype > type##_MAX)) \
 				goto value_out_of_range; \
+			/* convert to disk representation */ \
 			realtype = parsetype; \
 			disktype = letoh(disktype); \
+			/* write it */ \
 			if (fwrite(&disktype, bytes, \
 				   1, self->file) != 1) \
 				goto err; \
+			/* advance buf */ \
 			buf = endptr; \
 		} \
 		/* Skip trailing whitespace and comments */ \
-		while (*buf == ' ' || *buf == '\t') \
-			buf++; \
+		SKIP_BLANK(buf); \
 		if (*buf == '#') \
 			while (*buf && *buf != '\n') \
 				buf++; \
@@ -436,14 +376,14 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 			goto extra_data_on_line; \
 		break

-	CS(INT8,   strtol10,  t64.i, t8.i,  t8.u,  , 1);
-	CS(UINT8,  strtoul10, t64.u, t8.u,  t8.u,  , 1);
-	CS(INT16,  strtol10,  t64.i, t16.i, t16.u, le16toh, 2);
-	CS(UINT16, strtoul10, t64.u, t16.u, t16.u, le16toh, 2);
-	CS(INT32,  strtol10,  t64.i, t32.i, t32.u, le32toh, 4);
-	CS(UINT32, strtoul10, t64.u, t32.u, t32.u, le32toh, 4);
-	CS(INT64,  strtol10,  t64.i, t64.i, t64.u, le64toh, 8);
-	CS(UINT64, strtoul10, t64.u, t64.u, t64.u, le64toh, 8);
+	CS(INT8,   strtoll10,  t64.i, t8.i,  t8.u,  , 1);
+	CS(UINT8,  strtoull10, t64.u, t8.u,  t8.u,  , 1);
+	CS(INT16,  strtoll10,  t64.i, t16.i, t16.u, le16toh, 2);
+	CS(UINT16, strtoull10, t64.u, t16.u, t16.u, le16toh, 2);
+	CS(INT32,  strtoll10,  t64.i, t32.i, t32.u, le32toh, 4);
+	CS(UINT32, strtoull10, t64.u, t32.u, t32.u, le32toh, 4);
+	CS(INT64,  strtoll10,  t64.i, t64.i, t64.u, le64toh, 8);
+	CS(UINT64, strtoull10, t64.u, t64.u, t64.u, le64toh, 8);
 	CS(FLOAT32, strtod, t64.d, t32.f, t32.u, le32toh, 4);
 	CS(FLOAT64, strtod, t64.d, t64.d, t64.u, le64toh, 8);
 #undef CS
@@ -458,141 +398,118 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)

 	fflush(self->file);

-	/* Build return value and return*/
+	/* Build return value and return */
 	offset = buf - data;
 	PyObject *o;
-	o = Py_BuildValue("(iidi)", written, offset, last_timestamp, linenum);
+	o = Py_BuildValue("(iiLi)", written, offset,
+			  (long long)last_timestamp, linenum);
 	return o;
 err:
 	PyErr_SetFromErrno(PyExc_OSError);
 	return NULL;
+bad_timestamp:
+	return raise_str(linenum, buf - linestart + 1,
+			 ERR_OTHER, "bad timestamp");
+cant_parse_value:
+	return raise_str(linenum, buf - linestart + 1,
+			 ERR_OTHER, "can't parse value");
 wrong_number_of_values:
-	return raise_str(linenum, ERR_OTHER, "wrong number of values");
+	return raise_str(linenum, buf - linestart + 1,
+			 ERR_OTHER, "wrong number of values");
 value_out_of_range:
-	return raise_str(linenum, ERR_OTHER, "value out of range");
+	return raise_str(linenum, buf - linestart + 1,
+			 ERR_OTHER, "value out of range");
 extra_data_on_line:
-	return raise_str(linenum, ERR_OTHER, "extra data on line");
+	return raise_str(linenum, buf - linestart + 1,
+			 ERR_OTHER, "extra data on line");
 }

 /****
- * Extract to Python list
+ * Append from binary data
  */

-static int _extract_handle_params(Rocket *self, PyObject *args, long *count)
-{
-	long offset;
-	if (!PyArg_ParseTuple(args, "ll", &offset, count))
-		return -1;
-	if (!self->file) {
-		PyErr_SetString(PyExc_Exception, "no file");
-		return -1;
-	}
-	/* Seek to target location */
-	if (fseek(self->file, offset, SEEK_SET) < 0) {
-		PyErr_SetFromErrno(PyExc_OSError);
-		return -1;
-	}
-	return 0;
-}
-
-/* Helper for extracting data from a file as a Python object */
-static inline void *extract_pyobject(FILE *in, layout_type_t type)
-{
-	union8_t t8;
-	union16_t t16;
-	union32_t t32;
-	union64_t t64;
-
-	switch (type) {
-#define CASE(type, pyconvert, pytype, disktype, letoh, bytes) \
-	case LAYOUT_TYPE_##type: \
-		if (fread(&disktype, bytes, 1, in) <= 0) \
-			break; \
-		disktype = letoh(disktype); \
-		return pyconvert(pytype); \
-		break
-	CASE(INT8,    PyInt_FromLong,     t8.i,  t8.u,  , 1);
-	CASE(UINT8,   PyInt_FromLong,     t8.u,  t8.u,  , 1);
-	CASE(INT16,   PyInt_FromLong,     t16.i, t16.u, le16toh, 2);
-	CASE(UINT16,  PyInt_FromLong,     t16.u, t16.u, le16toh, 2);
-	CASE(INT32,   PyInt_FromLong,     t32.i, t32.u, le32toh, 4);
-	CASE(UINT32,  PyInt_FromLong,     t32.u, t32.u, le32toh, 4);
-	CASE(INT64,   PyInt_FromLong,     t64.i, t64.u, le64toh, 8);
-	CASE(UINT64,  PyInt_FromLong,     t64.u, t64.u, le64toh, 8);
-	CASE(FLOAT32, PyFloat_FromDouble, t32.f, t32.u, le32toh, 4);
-	CASE(FLOAT64, PyFloat_FromDouble, t64.d, t64.u, le64toh, 8);
-#undef CASE
-	default:
-		PyErr_SetString(PyExc_TypeError, "unknown type");
-		return NULL;
-	}
-	PyErr_SetString(PyExc_OSError, "failed to read from file");
-	return NULL;
-}
-
-static PyObject *Rocket_extract_list(Rocket *self, PyObject *args)
-{
-	long count;
-	if (_extract_handle_params(self, args, &count) < 0)
-		return NULL;
-
-	/* Make a list to return */
-	PyObject *retlist = PyList_New(0);
-	if (!retlist)
-		return NULL;
-
-	/* Read data into new Python lists */
-	int row;
-	for (row = 0; row < count; row++)
-	{
-		PyObject *rowlist = PyList_New(self->layout_count + 1);
-		if (!rowlist) {
-			Py_DECREF(retlist);
-			return NULL;
-		}
-
-		/* Timestamp */
-		PyObject *entry = extract_pyobject(self->file,
-						   LAYOUT_TYPE_FLOAT64);
-		if (!entry || (PyList_SetItem(rowlist, 0, entry) < 0)) {
-			Py_DECREF(rowlist);
-			Py_DECREF(retlist);
-			return NULL;
-		}
-
-		/* Data */
-		int i;
-		for (i = 0; i < self->layout_count; i++) {
-			PyObject *ent = extract_pyobject(self->file,
-							 self->layout_type);
-			if (!ent || (PyList_SetItem(rowlist, i+1, ent) < 0)) {
-				Py_DECREF(rowlist);
-				Py_DECREF(retlist);
-				return NULL;
-			}
-		}
-
-		/* Add row to return value */
-		if (PyList_Append(retlist, rowlist) < 0) {
-			Py_DECREF(rowlist);
-			Py_DECREF(retlist);
-			return NULL;
-		}
-
-		Py_DECREF(rowlist);
-	}
-	return retlist;
+/* .append_binary(count, data, offset, linenum, start, end, last_timestamp) */
+static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
+{
+	int count;
+	const uint8_t *data;
+	int data_len;
+	int linenum;
+	int offset;
+	long long ll1, ll2, ll3;
+	timestamp_t start;
+	timestamp_t end;
+	timestamp_t last_timestamp;
+
+	if (!PyArg_ParseTuple(args, "iy#iiLLL:append_binary",
+			      &count, &data, &data_len, &offset,
+			      &linenum, &ll1, &ll2, &ll3))
+		return NULL;
+	start = ll1;
+	end = ll2;
+	last_timestamp = ll3;
+
+	/* Advance to offset */
+	if (offset > data_len)
+		return raise_str(0, 0, ERR_OTHER, "bad offset");
+	data += offset;
+	data_len -= offset;
+
+	/* Figure out max number of rows to insert */
+	int rows = data_len / self->binary_size;
+	if (rows > count)
+		rows = count;
+
+	/* Check timestamps */
+	timestamp_t ts;
+	int i;
+	for (i = 0; i < rows; i++) {
+		/* Read raw timestamp, byteswap if needed */
+		memcpy(&ts, &data[i * self->binary_size], 8);
+		ts = le64toh(ts);
+
+		/* Check limits */
+		if (ts <= last_timestamp)
+			return raise_int(i, 0, ERR_NON_MONOTONIC, ts);
+		last_timestamp = ts;
+		if (ts < start || ts >= end)
+			return raise_int(i, 0, ERR_OUT_OF_INTERVAL, ts);
+	}
+
+	/* Write binary data */
+	if (fwrite(data, self->binary_size, rows, self->file) != rows) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+	fflush(self->file);
+
+	/* Build return value and return */
+	PyObject *o;
+	o = Py_BuildValue("(iiLi)", rows, offset + rows * self->binary_size,
+			  (long long)last_timestamp, linenum);
+	return o;
 }
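The new `append_binary` consumes rows in the on-disk format directly: little-endian, an int64 timestamp followed by the layout's values, `binary_size` bytes per row. A sketch of packing such input from Python; the `"<qfff"` format assumes a `float32_3` layout:

```python
# Build append_binary input: little-endian int64 timestamp + float32 values.
import struct

rows = [(1234567890000000, 1.0, 2.0, 3.0),
        (1234567890008333, 1.1, 2.1, 3.1)]
data = b"".join(struct.pack("<qfff", *row) for row in rows)
# r.append_binary(len(rows), data, 0, 0, start, end, last_ts)  # hypothetical call
```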

 /****
- * Extract to string
+ * Extract to binary bytes object containing ASCII text-formatted data
  */

 static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 {
 	long count;
-	if (_extract_handle_params(self, args, &count) < 0)
+	long offset;
+
+	if (!PyArg_ParseTuple(args, "ll", &offset, &count))
 		return NULL;
+	if (!self->file) {
+		PyErr_SetString(PyExc_Exception, "no file");
+		return NULL;
+	}
+	/* Seek to target location */
+	if (fseek(self->file, offset, SEEK_SET) < 0) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}

 	char *str = NULL, *new;
 	long len_alloc = 0;
@@ -626,8 +543,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 		if (fread(&t64.u, 8, 1, self->file) != 1)
 			goto err;
 		t64.u = le64toh(t64.u);
-		/* Timestamps are always printed to the microsecond */
-		ret = sprintf(&str[len], "%.6f", t64.d);
+		ret = sprintf(&str[len], "%" PRId64, t64.i);
 		if (ret <= 0)
 			goto err;
 		len += ret;
@@ -639,7 +555,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 		/* read and format in a loop */ \
 		for (i = 0; i < self->layout_count; i++) { \
 			if (fread(&disktype, bytes, \
-				  1, self->file) < 0) \
+				  1, self->file) != 1) \
 				goto err; \
 			disktype = letoh(disktype); \
 			ret = sprintf(&str[len], " " fmt, \
@@ -649,14 +565,14 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 			len += ret; \
 		} \
 		break
-	CASE(INT8,   "%hhd", t8.i,  t8.u,  , 1);
-	CASE(UINT8,  "%hhu", t8.u,  t8.u,  , 1);
-	CASE(INT16,  "%hd",  t16.i, t16.u, le16toh, 2);
-	CASE(UINT16, "%hu",  t16.u, t16.u, le16toh, 2);
-	CASE(INT32,  "%d",   t32.i, t32.u, le32toh, 4);
-	CASE(UINT32, "%u",   t32.u, t32.u, le32toh, 4);
-	CASE(INT64,  "%ld",  t64.i, t64.u, le64toh, 8);
-	CASE(UINT64, "%lu",  t64.u, t64.u, le64toh, 8);
+	CASE(INT8,   "%" PRId8,  t8.i,  t8.u,  , 1);
+	CASE(UINT8,  "%" PRIu8,  t8.u,  t8.u,  , 1);
+	CASE(INT16,  "%" PRId16, t16.i, t16.u, le16toh, 2);
+	CASE(UINT16, "%" PRIu16, t16.u, t16.u, le16toh, 2);
+	CASE(INT32,  "%" PRId32, t32.i, t32.u, le32toh, 4);
+	CASE(UINT32, "%" PRIu32, t32.u, t32.u, le32toh, 4);
+	CASE(INT64,  "%" PRId64, t64.i, t64.u, le64toh, 8);
+	CASE(UINT64, "%" PRIu64, t64.u, t64.u, le64toh, 8);
 	/* These next two are a bit debatable.  floats
 	   are 6-9 significant figures, so we print 7.
 	   Doubles are 15-19, so we print 17.  This is
@@ -673,7 +589,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 		str[len++] = '\n';
 	}

-	PyObject *pystr = PyString_FromStringAndSize(str, len);
+	PyObject *pystr = PyBytes_FromStringAndSize(str, len);
 	free(str);
 	return pystr;
 err:
@@ -682,6 +598,73 @@ err:
 	return NULL;
 }

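`extract_string` now returns a bytes object, with timestamps printed as plain integers via `PRId64` instead of `%.6f`. A sketch of consuming it, assuming `r` is a Rocket opened on an existing file:

```python
# extract_string() now yields bytes with integer microsecond timestamps.
raw = r.extract_string(0, 100)            # offset 0, up to 100 rows (assumed instance)
for line in raw.decode("ascii").splitlines():
    fields = line.split()
    ts = int(fields[0])                   # int64 microseconds, no "%.6f" float
    values = [float(f) for f in fields[1:]]
```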
+/****
+ * Extract to binary bytes object containing raw little-endian binary data
+ */
+static PyObject *Rocket_extract_binary(Rocket *self, PyObject *args)
+{
+	long count;
+	long offset;
+
+	if (!PyArg_ParseTuple(args, "ll", &offset, &count))
+		return NULL;
+	if (!self->file) {
+		PyErr_SetString(PyExc_Exception, "no file");
+		return NULL;
+	}
+	/* Seek to target location */
+	if (fseek(self->file, offset, SEEK_SET) < 0) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+
+	uint8_t *str;
+	int len = count * self->binary_size;
+	str = malloc(len);
+	if (str == NULL) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+
+	/* Data in the file is already in the desired little-endian
+	   binary format, so just read it directly. */
+	if (fread(str, self->binary_size, count, self->file) != count) {
+		free(str);
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+
+	PyObject *pystr = PyBytes_FromStringAndSize((char *)str, len);
+	free(str);
+	return pystr;
+}
+
+/****
+ * Extract timestamp
+ */
+static PyObject *Rocket_extract_timestamp(Rocket *self, PyObject *args)
+{
+	long offset;
+	union64_t t64;
+	if (!PyArg_ParseTuple(args, "l", &offset))
+		return NULL;
+	if (!self->file) {
+		PyErr_SetString(PyExc_Exception, "no file");
+		return NULL;
+	}
+
+	/* Seek to target location and read timestamp */
+	if ((fseek(self->file, offset, SEEK_SET) < 0) ||
+	    (fread(&t64.u, 8, 1, self->file) != 1)) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+
+	/* Convert and return */
+	t64.u = le64toh(t64.u);
+	return Py_BuildValue("L", (long long)t64.i);
+}

 /****
  * Module and type setup
  */
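Since `extract_binary` hands back the file bytes verbatim, the output can be decoded with the same fixed little-endian row format used by `append_binary`. A sketch for a `float32_3` layout, where each row is 8 + 3×4 = 20 bytes:

```python
# Parse extract_binary() output: binary_size bytes per row, little-endian.
import struct

raw = r.extract_binary(0, 2)              # 'r' is an assumed Rocket instance
for (ts, a, b, c) in struct.iter_unpack("<qfff", raw):
    print(ts, a, b, c)                    # int64 timestamp + float32 values
```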
@@ -699,15 +682,13 @@ static PyMemberDef Rocket_members[] = {
 };

 static PyMethodDef Rocket_methods[] = {
-	{ "close", (PyCFunction)Rocket_close, METH_NOARGS,
+	{ "close",
+	  (PyCFunction)Rocket_close, METH_NOARGS,
 	  "close(self)\n\n"
 	  "Close file handle" },

-	{ "append_iter", (PyCFunction)Rocket_append_iter, METH_VARARGS,
-	  "append_iter(self, maxrows, iterable)\n\n"
-	  "Append up to maxrows of data from iter to the file" },
-
-	{ "append_string", (PyCFunction)Rocket_append_string, METH_VARARGS,
+	{ "append_string",
+	  (PyCFunction)Rocket_append_string, METH_VARARGS,
 	  "append_string(self, count, data, offset, line, start, end, ts)\n\n"
 	  "Parse string and append data.\n"
 	  "\n"
@@ -722,26 +703,56 @@ static PyMethodDef Rocket_methods[] = {
 	  "Raises ParseError if timestamps are non-monotonic, outside\n"
 	  "the start/end interval etc.\n"
 	  "\n"
-	  "On success, return a tuple with three values:\n"
+	  "On success, return a tuple:\n"
 	  "  added_rows: how many rows were added from the file\n"
 	  "  data_offset: current offset into the data string\n"
-	  "  last_timestamp: last timestamp we parsed" },
+	  "  last_timestamp: last timestamp we parsed\n"
+	  "  linenum: current line number" },

-	{ "extract_list", (PyCFunction)Rocket_extract_list, METH_VARARGS,
-	  "extract_list(self, offset, count)\n\n"
-	  "Extract count rows of data from the file at offset offset.\n"
-	  "Return a list of lists [[row],[row],...]" },
+	{ "append_binary",
+	  (PyCFunction)Rocket_append_binary, METH_VARARGS,
+	  "append_binary(self, count, data, offset, line, start, end, ts)\n\n"
+	  "Append binary data, which must match the data layout.\n"
+	  "\n"
+	  "  count: maximum number of rows to add\n"
+	  "  data: binary data\n"
+	  "  offset: byte offset into data to start adding\n"
+	  "  line: current line number (unused)\n"
+	  "  start: starting timestamp for interval\n"
+	  "  end: end timestamp for interval\n"
+	  "  ts: last timestamp that was previously parsed\n"
+	  "\n"
+	  "Raises ParseError if timestamps are non-monotonic, outside\n"
+	  "the start/end interval etc.\n"
+	  "\n"
+	  "On success, return a tuple:\n"
+	  "  added_rows: how many rows were added from the file\n"
+	  "  data_offset: current offset into the data string\n"
+	  "  last_timestamp: last timestamp we parsed\n"
+	  "  linenum: current line number (copied from argument)" },

-	{ "extract_string", (PyCFunction)Rocket_extract_string, METH_VARARGS,
+	{ "extract_string",
+	  (PyCFunction)Rocket_extract_string, METH_VARARGS,
 	  "extract_string(self, offset, count)\n\n"
 	  "Extract count rows of data from the file at offset offset.\n"
 	  "Return an ascii formatted string according to the layout" },

+	{ "extract_binary",
+	  (PyCFunction)Rocket_extract_binary, METH_VARARGS,
+	  "extract_binary(self, offset, count)\n\n"
+	  "Extract count rows of data from the file at offset offset.\n"
+	  "Return a raw binary string of data matching the data layout." },
+
+	{ "extract_timestamp",
+	  (PyCFunction)Rocket_extract_timestamp, METH_VARARGS,
+	  "extract_timestamp(self, offset)\n\n"
+	  "Extract a single timestamp from the file" },
+
 	{ NULL },
 };

 static PyTypeObject RocketType = {
-	PyObject_HEAD_INIT(NULL)
+	PyVarObject_HEAD_INIT(NULL, 0)

 	.tp_name = "rocket.Rocket",
 	.tp_basicsize = sizeof(Rocket),
@@ -766,17 +777,23 @@ static PyMethodDef module_methods[] = {
 	{ NULL },
 };

-PyMODINIT_FUNC
-initrocket(void)
+static struct PyModuleDef moduledef = {
+	PyModuleDef_HEAD_INIT,
+	.m_name = "rocker",
+	.m_doc = "Rocket data parsing and formatting module",
+	.m_size = -1,
+	.m_methods = module_methods,
+};
+
+PyMODINIT_FUNC PyInit_rocket(void)
 {
 	PyObject *module;

 	RocketType.tp_new = PyType_GenericNew;
 	if (PyType_Ready(&RocketType) < 0)
-		return;
+		return NULL;

-	module = Py_InitModule3("rocket", module_methods,
-				"Rocket data parsing and formatting module");
+	module = PyModule_Create(&moduledef);
 	Py_INCREF(&RocketType);
 	PyModule_AddObject(module, "Rocket", (PyObject *)&RocketType);

@@ -785,5 +802,5 @@ initrocket(void)
 	PyModule_AddObject(module, "ParseError", ParseError);
 	add_parseerror_codes(module);

-	return;
+	return module;
 }
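With `Py_InitModule3` replaced by the `PyModuleDef`/`PyInit_rocket` pattern, the extension imports under Python 3 like any other module. A quick smoke-test sketch; the import path and layout string are assumptions, not part of this diff:

```python
# Smoke test for the ported extension (module path and layout assumed).
from nilmdb.server import rocket

r = rocket.Rocket("float32_8", None)      # no backing file
print(rocket.ParseError)                  # added by module init
print(r.extract_binary.__doc__.splitlines()[0])
```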
@@ -1,150 +1,49 @@
 """CherryPy-based server for accessing NILM database via HTTP"""

-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the nilmdb module instead.
-from __future__ import absolute_import
-import nilmdb.server
-from nilmdb.utils.printf import *
-from nilmdb.server.errors import NilmDBError
-
-import cherrypy
-import sys
 import os
-import simplejson as json
-import decorator
-import psutil
+import json
+import socket
+import traceback

-class NilmApp(object):
+import psutil
+import cherrypy
+
+import nilmdb.server
+from nilmdb.utils.printf import sprintf
+from nilmdb.server.errors import NilmDBError
+from nilmdb.utils.time import string_to_timestamp
+
+from nilmdb.server.serverutil import (
+    chunked_response,
+    response_type,
+    exception_to_httperror,
+    CORS_allow,
+    json_to_request_params,
+    json_error_page,
+    cherrypy_start,
+    cherrypy_stop,
+    bool_param,
+)
+
+# Add CORS_allow tool
+cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
+
+
+class NilmApp():
     def __init__(self, db):
         self.db = db

-# Decorators
-def chunked_response(func):
-    """Decorator to enable chunked responses."""
-    # Set this to False to get better tracebacks from some requests
-    # (/stream/extract, /stream/intervals).
-    func._cp_config = { 'response.stream': True }
-    return func
-
-def response_type(content_type):
-    """Return a decorator-generating function that sets the
-    response type to the specified string."""
-    def wrapper(func, *args, **kwargs):
-        cherrypy.response.headers['Content-Type'] = content_type
-        return func(*args, **kwargs)
-    return decorator.decorator(wrapper)
-
-@decorator.decorator
-def workaround_cp_bug_1200(func, *args, **kwargs): # pragma: no cover
-    """Decorator to work around CherryPy bug #1200 in a response
-    generator.
-
-    Even if chunked responses are disabled, LookupError or
-    UnicodeError exceptions may still be swallowed by CherryPy due to
-    bug #1200.  This throws them as generic Exceptions instead so that
-    they make it through.
-    """
-    exc_info = None
-    try:
-        for val in func(*args, **kwargs):
-            yield val
-    except (LookupError, UnicodeError):
-        # Re-raise it, but maintain the original traceback
-        exc_info = sys.exc_info()
-        new_exc = Exception(exc_info[0].__name__ + ": " + str(exc_info[1]))
-        raise new_exc, None, exc_info[2]
-    finally:
-        del exc_info
-
-def exception_to_httperror(*expected):
-    """Return a decorator-generating function that catches expected
-    errors and throws a HTTPError describing it instead.
-
-        @exception_to_httperror(NilmDBError, ValueError)
-        def foo():
-            pass
-    """
-    def wrapper(func, *args, **kwargs):
-        exc_info = None
-        try:
-            return func(*args, **kwargs)
-        except expected:
-            # Re-raise it, but maintain the original traceback
-            exc_info = sys.exc_info()
-            new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
-            raise new_exc, None, exc_info[2]
-        finally:
-            del exc_info
-    # We need to preserve the function's argspecs for CherryPy to
-    # handle argument errors correctly.  Decorator.decorator takes
-    # care of that.
-    return decorator.decorator(wrapper)
-
-# Custom CherryPy tools
-
-def CORS_allow(methods):
-    """This does several things:
-
-    Handles CORS preflight requests.
-    Adds Allow: header to all requests.
-    Raise 405 if request.method not in method.
-
-    It is similar to cherrypy.tools.allow, with the CORS stuff added.
-    """
-    request = cherrypy.request.headers
-    response = cherrypy.response.headers
-
-    if not isinstance(methods, (tuple, list)): # pragma: no cover
-        methods = [ methods ]
-    methods = [ m.upper() for m in methods if m ]
-    if not methods: # pragma: no cover
-        methods = [ 'GET', 'HEAD' ]
-    elif 'GET' in methods and 'HEAD' not in methods: # pragma: no cover
-        methods.append('HEAD')
-    response['Allow'] = ', '.join(methods)
-
-    # Allow all origins
-    if 'Origin' in request:
-        response['Access-Control-Allow-Origin'] = request['Origin']
-
-    # If it's a CORS request, send response.
-    request_method = request.get("Access-Control-Request-Method", None)
-    request_headers = request.get("Access-Control-Request-Headers", None)
-    if (cherrypy.request.method == "OPTIONS" and
-        request_method and request_headers):
-        response['Access-Control-Allow-Headers'] = request_headers
-        response['Access-Control-Allow-Methods'] = ', '.join(methods)
-        # Try to stop further processing and return a 200 OK
-        cherrypy.response.status = "200 OK"
-        cherrypy.response.body = ""
-        cherrypy.request.handler = lambda: ""
-        return
-
-    # Reject methods that were not explicitly allowed
-    if cherrypy.request.method not in methods:
-        raise cherrypy.HTTPError(405)
-
-cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
-
-# Helper for json_in tool to process JSON data into normal request
-# parameters.
-def json_to_request_params(body):
-    cherrypy.lib.jsontools.json_processor(body)
-    if not isinstance(cherrypy.request.json, dict):
-        raise cherrypy.HTTPError(415)
-    cherrypy.request.params.update(cherrypy.request.json)
-
 # CherryPy apps
 class Root(NilmApp):
     """Root application for NILM database"""

-    def __init__(self, db):
-        super(Root, self).__init__(db)
-
     # /
     @cherrypy.expose
     def index(self):
-        raise cherrypy.NotFound()
+        cherrypy.response.headers['Content-Type'] = 'text/plain'
+        msg = sprintf("This is NilmDB version %s, running on host %s.\n",
+                      nilmdb.__version__, socket.getfqdn())
+        return msg

     # /favicon.ico
     @cherrypy.expose
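The decorators and the CORS tool now live in `nilmdb.server.serverutil`, but the preflight behavior they implement is unchanged: OPTIONS requests with CORS headers get a 200 plus `Access-Control-Allow-*` headers. A sketch of exercising it with `requests`, assuming a server running on localhost port 12380:

```python
# CORS preflight sketch against an assumed local NilmDB server.
import requests

resp = requests.options("http://localhost:12380/stream/list",
                        headers={"Origin": "http://example.com",
                                 "Access-Control-Request-Method": "GET",
                                 "Access-Control-Request-Headers": "X-Test"})
print(resp.status_code, resp.headers.get("Access-Control-Allow-Origin"))
```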
@@ -164,27 +63,58 @@ class Root(NilmApp):
         """Return a dictionary with the database path,
         size of the database in bytes, and free disk space in bytes"""
         path = self.db.get_basepath()
-        return { "path": path,
-                 "size": nilmdb.utils.du(path),
-                 "free": psutil.disk_usage(path).free }
+        usage = psutil.disk_usage(path)
+        dbsize = nilmdb.utils.du(path)
+        return {
+            "path": path,
+            "size": dbsize,
+            "other": max(usage.used - dbsize, 0),
+            "reserved": max(usage.total - usage.used - usage.free, 0),
+            "free": usage.free
+        }


 class Stream(NilmApp):
     """Stream-specific operations"""

+    # Helpers
+    def _get_times(self, start_param, end_param):
+        (start, end) = (None, None)
+        try:
+            if start_param is not None:
+                start = string_to_timestamp(start_param)
+        except Exception:
+            raise cherrypy.HTTPError("400 Bad Request", sprintf(
+                "invalid start (%s): must be a numeric timestamp",
+                start_param))
+        try:
+            if end_param is not None:
+                end = string_to_timestamp(end_param)
+        except Exception:
+            raise cherrypy.HTTPError("400 Bad Request", sprintf(
+                "invalid end (%s): must be a numeric timestamp", end_param))
+        if start is not None and end is not None:
+            if start >= end:
+                raise cherrypy.HTTPError(
+                    "400 Bad Request",
+                    sprintf("start must precede end (%s >= %s)",
+                            start_param, end_param))
+        return (start, end)

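The new `_get_times` helper centralizes the start/end validation that was previously repeated in each handler. A sketch of the equivalent checks, assuming `string_to_timestamp` parses a numeric string into an integer microsecond timestamp:

```python
# Equivalent of the _get_times checks (string_to_timestamp semantics assumed).
from nilmdb.utils.time import string_to_timestamp

start = string_to_timestamp("1234567890000000")
end = string_to_timestamp("1234567899000000")
assert start < end      # otherwise the server raises 400 Bad Request
```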
     # /stream/list
     # /stream/list?layout=float32_8
     # /stream/list?path=/newton/prep&extended=1
     @cherrypy.expose
     @cherrypy.tools.json_out()
-    def list(self, path = None, layout = None, extended = None):
+    def list(self, path=None, layout=None, extended=None):
         """List all streams in the database.  With optional path or
         layout parameter, just list streams that match the given path
         or layout.

-        If extent is not given, returns a list of lists containing
-        the path and layout: [ path, layout ]
+        If extended is missing or zero, returns a list of lists
+        containing the path and layout: [ path, layout ]

-        If extended is provided, returns a list of lists containing
+        If extended is true, returns a list of lists containing
         extended info: [ path, layout, extent_min, extent_max,
         total_rows, total_seconds ].  More data may be added.
         """
@@ -195,7 +125,7 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, ValueError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def create(self, path, layout):
         """Create a new stream in the database.  Provide path
         and one of the nilmdb.layout.layouts keys.
@@ -207,11 +137,21 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def destroy(self, path):
-        """Delete a stream and its associated data."""
+        """Delete a stream.  Fails if any data is still present."""
         return self.db.stream_destroy(path)

+    # /stream/rename?oldpath=/newton/prep&newpath=/newton/prep/1
+    @cherrypy.expose
+    @cherrypy.tools.json_in()
+    @cherrypy.tools.json_out()
+    @exception_to_httperror(NilmDBError, ValueError)
+    @cherrypy.tools.CORS_allow(methods=["POST"])
+    def rename(self, oldpath, newpath):
+        """Rename a stream."""
+        return self.db.stream_rename(oldpath, newpath)
+
     # /stream/get_metadata?path=/newton/prep
     # /stream/get_metadata?path=/newton/prep&key=foo&key=bar
     @cherrypy.expose
@@ -223,16 +163,16 @@ class Stream(NilmApp):
         try:
             data = self.db.stream_get_metadata(path)
         except nilmdb.server.nilmdb.StreamError as e:
-            raise cherrypy.HTTPError("404 Not Found", e.message)
+            raise cherrypy.HTTPError("404 Not Found", str(e))
         if key is None:  # If no keys specified, return them all
-            key = data.keys()
+            key = list(data.keys())
         elif not isinstance(key, list):
-            key = [ key ]
+            key = [key]
         result = {}
         for k in key:
             if k in data:
                 result[k] = data[k]
             else:  # Return "None" for keys with no matching value
                 result[k] = None
         return result

@@ -242,11 +182,9 @@ class Stream(NilmApp):
         try:
             data = dict(json.loads(data))
         except TypeError as e:
-            raise NilmDBError("can't parse 'data' parameter: " + e.message)
+            raise NilmDBError("can't parse 'data' parameter: " + str(e))
         for key in data:
-            if not (isinstance(data[key], basestring) or
-                    isinstance(data[key], float) or
-                    isinstance(data[key], int)):
+            if not isinstance(data[key], (str, float, int)):
                 raise NilmDBError("metadata values must be a string or number")
         function(path, data)

@@ -255,7 +193,7 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, LookupError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def set_metadata(self, path, data):
         """Set metadata for the named stream, replacing any existing
         metadata.  Data can be json-encoded or a plain dictionary."""
@@ -266,7 +204,7 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, LookupError, ValueError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def update_metadata(self, path, data):
         """Set metadata for the named stream, replacing any existing
         metadata.  Data can be json-encoded or a plain dictionary."""
@@ -276,33 +214,46 @@ class Stream(NilmApp):
     @cherrypy.expose
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, ValueError)
-    @cherrypy.tools.CORS_allow(methods = ["PUT"])
-    def insert(self, path, start, end):
+    @cherrypy.tools.CORS_allow(methods=["PUT"])
+    def insert(self, path, start, end, binary=False):
         """
         Insert new data into the database.  Provide textual data
         (matching the path's layout) as a HTTP PUT.
+
+        If 'binary' is True, expect raw binary data, rather than lines
+        of ASCII-formatted data.  Raw binary data is always
+        little-endian and matches the database types (including an
+        int64 timestamp).
         """
+        binary = bool_param(binary)
+
         # Important that we always read the input before throwing any
         # errors, to keep lengths happy for persistent connections.
         # Note that CherryPy 3.2.2 has a bug where this fails for GET
         # requests, if we ever want to handle those (issue #1134)
         body = cherrypy.request.body.read()

+        # Verify content type for binary data
+        content_type = cherrypy.request.headers.get('content-type')
+        if binary and content_type:
+            if content_type != "application/octet-stream":
+                raise cherrypy.HTTPError("400", "Content type must be "
+                                         "application/octet-stream for "
+                                         "binary data, not " + content_type)
+
+        # Note that non-binary data is *not* decoded from bytes to string,
+        # but rather passed directly to stream_insert.
+
         # Check path and get layout
-        streams = self.db.stream_list(path = path)
-        if len(streams) != 1:
-            raise cherrypy.HTTPError("404 Not Found", "No such stream")
+        if len(self.db.stream_list(path=path)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + path)

         # Check limits
-        start = float(start)
-        end = float(end)
-        if start >= end:
-            raise cherrypy.HTTPError("400 Bad Request",
-                                     "start must precede end")
+        (start, end) = self._get_times(start, end)

         # Pass the data directly to nilmdb, which will parse it and
         # raise a ValueError if there are any problems.
-        self.db.stream_insert(path, start, end, body)
+        self.db.stream_insert(path, start, end, body, binary)

         # Done
         return
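The new binary insert path requires the request body to be raw little-endian rows and the content type to be `application/octet-stream`. A sketch of driving it with `requests`; the URL, port, and `float32_3` layout are assumptions, while the `/newton/prep` path follows the examples in the comments above:

```python
# Binary PUT sketch against an assumed local server (port and URL assumed).
import struct
import requests

data = struct.pack("<qfff", 1234567890000000, 1.0, 2.0, 3.0)
requests.put("http://localhost:12380/stream/insert",
             params={"path": "/newton/prep",
                     "start": "1234567890000000",
                     "end": "1234567891000000",
                     "binary": "1"},
             data=data,
             headers={"Content-Type": "application/octet-stream"})
```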
@@ -311,62 +262,72 @@ class Stream(NilmApp):
     # /stream/remove?path=/newton/prep&start=1234567890.0&end=1234567899.0
     @cherrypy.expose
     @cherrypy.tools.json_in()
-    @cherrypy.tools.json_out()
-    @exception_to_httperror(NilmDBError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
-    def remove(self, path, start = None, end = None):
+    @cherrypy.tools.CORS_allow(methods=["POST"])
+    @chunked_response
+    @response_type("application/x-json-stream")
+    def remove(self, path, start=None, end=None):
         """
         Remove data from the backend database.  Removes all data in
-        the interval [start, end).  Returns the number of data points
-        removed.
+        the interval [start, end).
+
+        Returns the number of data points removed.  Since this is a potentially
+        long-running operation, multiple numbers may be returned as the
+        data gets removed from the backend database.  The total number of
+        points removed is the sum of all of these numbers.
         """
-        if start is not None:
-            start = float(start)
-        if end is not None:
-            end = float(end)
-        if start is not None and end is not None:
-            if start >= end:
-                raise cherrypy.HTTPError("400 Bad Request",
-                                         "start must precede end")
-        return self.db.stream_remove(path, start, end)
+        (start, end) = self._get_times(start, end)
+
+        if len(self.db.stream_list(path=path)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + path)
+
+        def content(start, end):
+            # Note: disable chunked responses to see tracebacks from here.
+            while True:
+                (removed, restart) = self.db.stream_remove(path, start, end)
+                response = json.dumps(removed) + "\r\n"
+                yield response.encode('utf-8')
+                if restart is None:
+                    break
+                start = restart
+        return content(start, end)

     # /stream/intervals?path=/newton/prep
     # /stream/intervals?path=/newton/prep&start=1234567890.0&end=1234567899.0
+    # /stream/intervals?path=/newton/prep&diffpath=/newton/prep2
     @cherrypy.expose
     @chunked_response
     @response_type("application/x-json-stream")
-    def intervals(self, path, start = None, end = None):
+    def intervals(self, path, start=None, end=None, diffpath=None):
         """
         Get intervals from backend database.  Streams the resulting
         intervals as JSON strings separated by CR LF pairs.  This may
         make multiple requests to the nilmdb backend to avoid causing
         it to block for too long.

+        Returns intervals between 'start' and 'end' belonging to
+        'path'.  If 'diff' is provided, the set-difference between
+        intervals in 'path' and intervals in 'diffpath' are
+        returned instead.
+
         Note that the response type is the non-standard
         'application/x-json-stream' for lack of a better option.
         """
-        if start is not None:
-            start = float(start)
-        if end is not None:
-            end = float(end)
-
-        if start is not None and end is not None:
-            if start >= end:
-                raise cherrypy.HTTPError("400 Bad Request",
-                                         "start must precede end")
-
-        streams = self.db.stream_list(path = path)
-        if len(streams) != 1:
-            raise cherrypy.HTTPError("404 Not Found", "No such stream")
-
-        @workaround_cp_bug_1200
+        (start, end) = self._get_times(start, end)
+
+        if len(self.db.stream_list(path=path)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + path)
+
+        if diffpath and len(self.db.stream_list(path=diffpath)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + diffpath)
+
         def content(start, end):
             # Note: disable chunked responses to see tracebacks from here.
             while True:
-                (ints, restart) = self.db.stream_intervals(path, start, end)
-                response = ''.join([ json.dumps(i) + "\r\n" for i in ints ])
-                yield response
-                if restart == 0:
+                (ints, restart) = self.db.stream_intervals(path, start, end,
+                                                           diffpath)
+                response = ''.join([json.dumps(i) + "\r\n" for i in ints])
+                yield response.encode('utf-8')
+                if restart is None:
                     break
                 start = restart
         return content(start, end)
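Both `/stream/remove` and `/stream/intervals` now stream `application/x-json-stream`: JSON documents separated by CR LF. A sketch of consuming such a response with `requests` (server URL and port are assumptions):

```python
# Consume an x-json-stream response: one JSON document per CR LF line.
import json
import requests

resp = requests.get("http://localhost:12380/stream/intervals",
                    params={"path": "/newton/prep"}, stream=True)
for line in resp.iter_lines():
    if line:
        print(json.loads(line))
```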
@@ -374,71 +335,87 @@ class Stream(NilmApp):
     # /stream/extract?path=/newton/prep&start=1234567890.0&end=1234567899.0
     @cherrypy.expose
     @chunked_response
-    @response_type("text/plain")
-    def extract(self, path, start = None, end = None, count = False):
+    def extract(self, path, start=None, end=None,
+                count=False, markup=False, binary=False):
         """
         Extract data from backend database.  Streams the resulting
         entries as ASCII text lines separated by newlines.  This may
         make multiple requests to the nilmdb backend to avoid causing
         it to block for too long.

-        Add count=True to return a count rather than actual data.
-        """
-        if start is not None:
-            start = float(start)
-        if end is not None:
-            end = float(end)
-
-        # Check parameters
-        if start is not None and end is not None:
-            if start >= end:
-                raise cherrypy.HTTPError("400 Bad Request",
-                                         "start must precede end")
+        If 'count' is True, returns a count rather than actual data.
+
+        If 'markup' is True, adds comments to the stream denoting each
+        interval's start and end timestamp.
+
+        If 'binary' is True, return raw binary data, rather than lines
+        of ASCII-formatted data.  Raw binary data is always
+        little-endian and matches the database types (including an
+        int64 timestamp).
+        """
+        binary = bool_param(binary)
+        markup = bool_param(markup)
+        count = bool_param(count)
+
+        (start, end) = self._get_times(start, end)

         # Check path and get layout
-        streams = self.db.stream_list(path = path)
-        if len(streams) != 1:
-            raise cherrypy.HTTPError("404 Not Found", "No such stream")
-
-        @workaround_cp_bug_1200
-        def content(start, end, count):
+        if len(self.db.stream_list(path=path)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + path)
+
+        if binary:
+            content_type = "application/octet-stream"
+            if markup or count:
+                raise cherrypy.HTTPError("400", "can't mix binary and "
+                                         "markup or count modes")
+        else:
+            content_type = "text/plain"
+        cherrypy.response.headers['Content-Type'] = content_type
+
+        def content(start, end):
             # Note: disable chunked responses to see tracebacks from here.
             if count:
-                matched = self.db.stream_extract(path, start, end, count)
-                yield sprintf("%d\n", matched)
+                matched = self.db.stream_extract(path, start, end,
+                                                 count=True)
+                yield sprintf(b"%d\n", matched)
                 return

             while True:
-                (data, restart) = self.db.stream_extract(path, start, end)
+                (data, restart) = self.db.stream_extract(
+                    path, start, end, count=False,
+                    markup=markup, binary=binary)
                 yield data

-                if restart == 0:
+                if restart is None:
                     return
                 start = restart
-        return content(start, end, count)
-
-
-class Exiter(object):
+        return content(start, end)
+
+
+class Exiter():
     """App that exits the server, for testing"""
     @cherrypy.expose
     def index(self):
         cherrypy.response.headers['Content-Type'] = 'text/plain'
+
         def content():
-            yield 'Exiting by request'
+            yield b'Exiting by request'
             raise SystemExit
         return content()
-    index._cp_config = { 'response.stream': True }
+    index._cp_config = {'response.stream': True}


-class Server(object):
-    def __init__(self, db, host = '127.0.0.1', port = 8080,
-                 stoppable = False,       # whether /exit URL exists
-                 embedded = True,         # hide diagnostics and output, etc
-                 fast_shutdown = False,   # don't wait for clients to disconn.
-                 force_traceback = False  # include traceback in all errors
+class Server():
+    def __init__(self, db, host='127.0.0.1', port=8080,
+                 stoppable=False,        # whether /exit URL exists
+                 fast_shutdown=False,    # don't wait for clients to disconn.
+                 force_traceback=False,  # include traceback in all errors
+                 basepath='',            # base URL path for cherrypy.tree
                  ):
         # Save server version, just for verification during tests
         self.version = nilmdb.__version__

-        self.embedded = embedded
         self.db = db
         if not getattr(db, "_thread_safe", None):
             raise KeyError("Database object " + str(db) + " doesn't claim "
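Because the new 'binary' mode emits little-endian values matching the database types, a client can decode the response with a structured dtype. A sketch under assumed conditions (a local server, and a stream whose layout is one int64 timestamp followed by eight float32 columns; both are illustrative assumptions, not part of this diff):

    import numpy as np
    import requests

    url = "http://localhost:8080/stream/extract"
    raw = requests.get(url, params={"path": "/newton/prep",
                                    "binary": "1"}).content

    # int64 little-endian timestamp, then the row's data columns
    dtype = np.dtype([("timestamp", "<i8"), ("data", "<f4", (8,))])
    rows = np.frombuffer(raw, dtype=dtype)
    print(rows["timestamp"][0], rows["data"][0])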
@@ -448,13 +425,12 @@ class Server(object):

         # Build up global server configuration
         cherrypy.config.update({
+            'environment': 'embedded',
             'server.socket_host': host,
             'server.socket_port': port,
-            'engine.autoreload_on': False,
+            'engine.autoreload.on': False,
             'server.max_request_body_size': 8*1024*1024,
         })
-        if self.embedded:
-            cherrypy.config.update({ 'environment': 'embedded' })

         # Build up application specific configuration
         app_config = {}
@@ -463,23 +439,23 @@ class Server(object):
         })

         # Some default headers to just help identify that things are working
-        app_config.update({ 'response.headers.X-Jim-Is-Awesome': 'yeah' })
+        app_config.update({'response.headers.X-Jim-Is-Awesome': 'yeah'})

         # Set up Cross-Origin Resource Sharing (CORS) handler so we
         # can correctly respond to browsers' CORS preflight requests.
         # This also limits verbs to GET and HEAD by default.
-        app_config.update({ 'tools.CORS_allow.on': True,
-                            'tools.CORS_allow.methods': ['GET', 'HEAD'] })
+        app_config.update({'tools.CORS_allow.on': True,
+                           'tools.CORS_allow.methods': ['GET', 'HEAD']})

         # Configure the 'json_in' tool to also allow other content-types
         # (like x-www-form-urlencoded), and to treat JSON as a dict that
         # fills requests.param.
-        app_config.update({ 'tools.json_in.force': False,
-                            'tools.json_in.processor': json_to_request_params })
+        app_config.update({'tools.json_in.force': False,
+                           'tools.json_in.processor': json_to_request_params})

         # Send tracebacks in error responses.  They're hidden by the
         # error_page function for client errors (code 400-499).
-        app_config.update({ 'request.show_tracebacks' : True })
+        app_config.update({'request.show_tracebacks': True})
         self.force_traceback = force_traceback

         # Patch CherryPy error handler to never pad out error messages.
@@ -493,79 +469,78 @@ class Server(object):
         if stoppable:
             root.exit = Exiter()
         cherrypy.tree.apps = {}
-        cherrypy.tree.mount(root, "/", config = { "/" : app_config })
+        cherrypy.tree.mount(root, basepath, config={"/": app_config})

         # Shutdowns normally wait for clients to disconnect.  To speed
         # up tests, set fast_shutdown = True
         if fast_shutdown:
-            # Setting timeout to 0 triggers os._exit(70) at shutdown, grr...
-            cherrypy.server.shutdown_timeout = 0.01
+            cherrypy.server.shutdown_timeout = 0
         else:
             cherrypy.server.shutdown_timeout = 5

+        # Set up the WSGI application pointer for external programs
+        self.wsgi_application = cherrypy.tree
+
     def json_error_page(self, status, message, traceback, version):
         """Return a custom error page in JSON so the client can parse it"""
-        errordata = { "status" : status,
-                      "message" : message,
-                      "traceback" : traceback }
-        # Don't send a traceback if the error was 400-499 (client's fault)
-        try:
-            code = int(status.split()[0])
-            if not self.force_traceback:
-                if code >= 400 and code <= 499:
-                    errordata["traceback"] = ""
-        except Exception: # pragma: no cover
-            pass
-        # Override the response type, which was previously set to text/html
-        cherrypy.serving.response.headers['Content-Type'] = (
-            "application/json;charset=utf-8" )
-        # Undo the HTML escaping that cherrypy's get_error_page function applies
-        # (cherrypy issue 1135)
-        for k, v in errordata.iteritems():
-            v = v.replace("&lt;", "<")
-            v = v.replace("&gt;", ">")
-            v = v.replace("&amp;", "&")
-            errordata[k] = v
-        return json.dumps(errordata, separators=(',',':'))
-
-    def start(self, blocking = False, event = None):
-        if not self.embedded: # pragma: no cover
-            # Handle signals nicely
-            if hasattr(cherrypy.engine, "signal_handler"):
-                cherrypy.engine.signal_handler.subscribe()
-            if hasattr(cherrypy.engine, "console_control_handler"):
-                cherrypy.engine.console_control_handler.subscribe()
-
-        # Cherrypy stupidly calls os._exit(70) when it can't bind the
-        # port.  At least try to print a reasonable error and continue
-        # in this case, rather than just dying silently (as we would
-        # otherwise do in embedded mode)
-        real_exit = os._exit
-        def fake_exit(code): # pragma: no cover
-            if code == os.EX_SOFTWARE:
-                fprintf(sys.stderr, "error: CherryPy called os._exit!\n")
-            else:
-                real_exit(code)
-        os._exit = fake_exit
-        cherrypy.engine.start()
-        os._exit = real_exit
-
-        # Signal that the engine has started successfully
-        if event is not None:
-            event.set()
-
-        if blocking:
-            try:
-                cherrypy.engine.wait(cherrypy.engine.states.EXITING,
-                                     interval = 0.1, channel = 'main')
-            except (KeyboardInterrupt, IOError): # pragma: no cover
-                cherrypy.engine.log('Keyboard Interrupt: shutting down bus')
-                cherrypy.engine.exit()
-            except SystemExit: # pragma: no cover
-                cherrypy.engine.log('SystemExit raised: shutting down bus')
-                cherrypy.engine.exit()
-                raise
+        return json_error_page(status, message, traceback, version,
+                               self.force_traceback)
+
+    def start(self, blocking=False, event=None):
+        cherrypy_start(blocking, event)

     def stop(self):
-        cherrypy.engine.exit()
+        cherrypy_stop()
+
+
+# Use a single global nilmdb.server.NilmDB and nilmdb.server.Server
+# instance since the database can only be opened once.  For this to
+# work, the web server must use only a single process and single
+# Python interpreter.  Multiple threads are OK.
+_wsgi_server = None
+
+
+def wsgi_application(dbpath, basepath):
+    """Return a WSGI application object with a database at the
+    specified path.
+
+    'dbpath' is a filesystem location, e.g. /home/nilm/db
+
+    'basepath' is the URL path of the application base, which
+    is the same as the first argument to Apache's WSGIScriptAlias
+    directive.
+    """
+    def application(environ, start_response):
+        global _wsgi_server
+        if _wsgi_server is None:
+            # Try to start the server
+            try:
+                db = nilmdb.utils.serializer_proxy(
+                    nilmdb.server.NilmDB)(dbpath)
+                _wsgi_server = nilmdb.server.Server(
+                    db, basepath=basepath.rstrip('/'))
+            except Exception:
+                # Build an error message on failure
+                import pprint
+                err = sprintf("Initializing database at path '%s' failed:\n\n",
+                              dbpath)
+                err += traceback.format_exc()
+                import pwd
+                import grp
+                err += sprintf("\nRunning as: uid=%d (%s), gid=%d (%s) "
+                               "on host %s, pid %d\n",
+                               os.getuid(), pwd.getpwuid(os.getuid())[0],
+                               os.getgid(), grp.getgrgid(os.getgid())[0],
+                               socket.gethostname(), os.getpid())
+                err += sprintf("\nEnvironment:\n%s\n", pprint.pformat(environ))
+        if _wsgi_server is None:
+            # Serve up the error with our own mini WSGI app.
+            err_b = err.encode('utf-8')
+            headers = [('Content-type', 'text/plain; charset=utf-8'),
+                       ('Content-length', str(len(err_b)))]
+            start_response("500 Internal Server Error", headers)
+            return [err_b]
+
+        # Call the normal application
+        return _wsgi_server.wsgi_application(environ, start_response)
+    return application
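The docstring ties 'basepath' to Apache's WSGIScriptAlias. A hypothetical .wsgi entry point matching that description (both filesystem and URL paths here are assumptions for illustration):

    # nilmdb.wsgi, pointed at by a directive such as:
    #   WSGIScriptAlias /nilmdb /usr/local/share/nilmdb.wsgi
    import nilmdb.server

    # Database on disk at /home/nilm/db, served under the /nilmdb URL prefix
    application = nilmdb.server.wsgi_application("/home/nilm/db", "/nilmdb")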
nilmdb/server/serverutil.py (new file, 211 lines)
@@ -0,0 +1,211 @@
+"""Miscellaneous decorators and other helpers for running a CherryPy
+server"""
+
+import os
+import sys
+import json
+import decorator
+import functools
+
+import cherrypy
+
+
+# Helper to parse parameters into booleans
+def bool_param(s):
+    """Return a bool indicating whether parameter 's' was True or False,
+    supporting a few different types for 's'."""
+    try:
+        ss = s.lower()
+        if ss in ["0", "false", "f", "no", "n"]:
+            return False
+        if ss in ["1", "true", "t", "yes", "y"]:
+            return True
+    except Exception:
+        return bool(s)
+    raise cherrypy.HTTPError("400 Bad Request",
+                             "can't parse parameter: " + ss)
+
+
+# Decorators
+def chunked_response(func):
+    """Decorator to enable chunked responses."""
+    # Set this to False to get better tracebacks from some requests
+    # (/stream/extract, /stream/intervals).
+    func._cp_config = {'response.stream': True}
+    return func
+
+
+def response_type(content_type):
+    """Return a decorator-generating function that sets the
+    response type to the specified string."""
+    def wrapper(func, *args, **kwargs):
+        cherrypy.response.headers['Content-Type'] = content_type
+        return func(*args, **kwargs)
+    return decorator.decorator(wrapper)
+
+
+def exception_to_httperror(*expected):
+    """Return a decorator-generating function that catches expected
+    errors and throws a HTTPError describing it instead.
+
+        @exception_to_httperror(NilmDBError, ValueError)
+        def foo():
+            pass
+    """
+    def wrapper(func, *args, **kwargs):
+        exc_info = None
+        try:
+            return func(*args, **kwargs)
+        except expected:
+            # Re-raise it, but maintain the original traceback
+            exc_info = sys.exc_info()
+            new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
+            raise new_exc.with_traceback(exc_info[2])
+        finally:
+            del exc_info
+    # We need to preserve the function's argspecs for CherryPy to
+    # handle argument errors correctly.  Decorator.decorator takes
+    # care of that.
+    return decorator.decorator(wrapper)
+
+
+# Custom CherryPy tools
+def CORS_allow(methods):
+    """This does several things:
+
+    Handles CORS preflight requests.
+    Adds Allow: header to all requests.
+    Raise 405 if request.method not in method.
+
+    It is similar to cherrypy.tools.allow, with the CORS stuff added.
+
+    Add this to CherryPy with:
+    cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
+    """
+    request = cherrypy.request.headers
+    response = cherrypy.response.headers
+
+    if not isinstance(methods, (tuple, list)):
+        methods = [methods]
+    methods = [m.upper() for m in methods if m]
+    if not methods:
+        methods = ['GET', 'HEAD']
+    elif 'GET' in methods and 'HEAD' not in methods:
+        methods.append('HEAD')
+    response['Allow'] = ', '.join(methods)
+
+    # Allow all origins
+    if 'Origin' in request:
+        response['Access-Control-Allow-Origin'] = request['Origin']
+
+    # If it's a CORS request, send response.
+    request_method = request.get("Access-Control-Request-Method", None)
+    request_headers = request.get("Access-Control-Request-Headers", None)
+    if (cherrypy.request.method == "OPTIONS" and
+            request_method and request_headers):
+        response['Access-Control-Allow-Headers'] = request_headers
+        response['Access-Control-Allow-Methods'] = ', '.join(methods)
+        # Try to stop further processing and return a 200 OK
+        cherrypy.response.status = "200 OK"
+        cherrypy.response.body = b""
+        cherrypy.request.handler = lambda: ""
+        return
+
+    # Reject methods that were not explicitly allowed
+    if cherrypy.request.method not in methods:
+        raise cherrypy.HTTPError(405)
+
+
+# Helper for json_in tool to process JSON data into normal request
+# parameters.
+def json_to_request_params(body):
+    cherrypy.lib.jsontools.json_processor(body)
+    if not isinstance(cherrypy.request.json, dict):
+        raise cherrypy.HTTPError(415)
+    cherrypy.request.params.update(cherrypy.request.json)
+
+
+# Used as an "error_page.default" handler
+def json_error_page(status, message, traceback, version,
+                    force_traceback=False):
+    """Return a custom error page in JSON so the client can parse it"""
+    errordata = {"status": status,
+                 "message": message,
+                 "version": version,
+                 "traceback": traceback}
+    # Don't send a traceback if the error was 400-499 (client's fault)
+    code = int(status.split()[0])
+    if not force_traceback:
+        if 400 <= code <= 499:
+            errordata["traceback"] = ""
+    # Override the response type, which was previously set to text/html
+    cherrypy.serving.response.headers['Content-Type'] = (
+        "application/json;charset=utf-8")
+    # Undo the HTML escaping that cherrypy's get_error_page function applies
+    # (cherrypy issue 1135)
+    for k, v in errordata.items():
+        v = v.replace("&lt;", "<")
+        v = v.replace("&gt;", ">")
+        v = v.replace("&amp;", "&")
+        errordata[k] = v
+    return json.dumps(errordata, separators=(',', ':'))
+
+
+class CherryPyExit(SystemExit):
+    pass
+
+
+def cherrypy_patch_exit():
+    # Cherrypy stupidly calls os._exit(70) when it can't bind the port
+    # and exits.  Instead of that, raise a CherryPyExit (derived from
+    # SystemExit).  This exception may not make it back up to the caller
+    # due to internal thread use in the CherryPy engine, but there should
+    # be at least some indication that it happened.
+    bus = cherrypy.process.wspbus.bus
+    if "_patched_exit" in bus.__dict__:
+        return
+    bus._patched_exit = True
+
+    def patched_exit(orig):
+        real_exit = os._exit
+
+        def fake_exit(code):
+            raise CherryPyExit(code)
+        os._exit = fake_exit
+        try:
+            orig()
+        finally:
+            os._exit = real_exit
+    bus.exit = functools.partial(patched_exit, bus.exit)
+
+
+# Start/stop CherryPy standalone server
+def cherrypy_start(blocking=False, event=False):
+    """Start the CherryPy server, handling errors and signals
+    somewhat gracefully."""
+
+    cherrypy_patch_exit()
+
+    # Start the server
+    cherrypy.engine.start()
+
+    # Signal that the engine has started successfully
+    if event is not None:
+        event.set()
+
+    if blocking:
+        try:
+            cherrypy.engine.wait(cherrypy.engine.states.EXITING,
+                                 interval=0.1, channel='main')
+        except (KeyboardInterrupt, IOError):
+            cherrypy.engine.log('Keyboard Interrupt: shutting down')
+            cherrypy.engine.exit()
+        except SystemExit:
+            cherrypy.engine.log('SystemExit raised: shutting down')
+            cherrypy.engine.exit()
+            raise
+
+
+# Stop CherryPy server
+def cherrypy_stop():
+    cherrypy.engine.exit()
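A short sketch of how these helpers compose on a CherryPy handler (the class and method names are hypothetical; only the decorators and bool_param come from the file above):

    import cherrypy
    from nilmdb.server.serverutil import (bool_param, chunked_response,
                                          exception_to_httperror)

    class Example:
        @cherrypy.expose
        @chunked_response                     # stream the response
        @exception_to_httperror(ValueError)   # ValueError -> 400 Bad Request
        def dump(self, path, header=False):
            header = bool_param(header)       # "1"/"true"/"yes"/... -> True
            def content():
                if header:
                    yield b"# path: " + path.encode("utf-8") + b"\n"
                yield b"...data...\n"
            return content()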
@@ -1,7 +1,7 @@
 """NilmDB utilities"""

+
 from nilmdb.utils.timer import Timer
-from nilmdb.utils.iteratorizer import Iteratorizer
 from nilmdb.utils.serializer import serializer_proxy
 from nilmdb.utils.lrucache import lru_cache
 from nilmdb.utils.diskusage import du, human_size
@@ -10,3 +10,7 @@ from nilmdb.utils import atomic
 import nilmdb.utils.threadsafety
 import nilmdb.utils.fallocate
 import nilmdb.utils.time
+import nilmdb.utils.iterator
+import nilmdb.utils.interval
+import nilmdb.utils.lock
+import nilmdb.utils.sort
@@ -2,12 +2,12 @@

 import os


 def replace_file(filename, content):
     """Attempt to atomically and durably replace the filename with the
-    given contents.  This is intended to be 'pretty good on most
-    OSes', but not necessarily bulletproof."""
+    given contents"""

-    newfilename = filename + ".new"
+    newfilename = filename + b".new"

     # Write to new file, flush it
     with open(newfilename, "wb") as f:
@@ -16,11 +16,4 @@ def replace_file(filename, content):
         os.fsync(f.fileno())

     # Move new file over old one
-    try:
-        os.rename(newfilename, filename)
-    except OSError: # pragma: no cover
-        # Some OSes might not support renaming over an existing file.
-        # This is definitely NOT atomic!
-        os.remove(filename)
-        os.rename(newfilename, filename)
+    os.replace(newfilename, filename)
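With this change, replace_file leans on os.replace(), which renames atomically on POSIX systems, and now expects a bytes filename since it appends b".new". A hypothetical call (the path and contents are illustrative):

    from nilmdb.utils.atomic import replace_file

    # Durably swap in a new file; readers see the old or new contents,
    # never a partial write.
    replace_file(b"/tmp/example.conf", b"key = value\n")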
@@ -1,710 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright 2009 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# Disable the invalid name warning as we are inheriting from a standard library
-# object.
-# pylint: disable-msg=C6409,W0212
-
-"""A version of the datetime module which *cares* about timezones.
-
-This module will never return a naive datetime object. This requires the module
-know your local timezone, which it tries really hard to figure out.
-
-You can override the detection by using the datetime.tzaware.defaulttz_set
-method. It the module is unable to figure out the timezone itself this method
-*must* be called before the normal module is imported. If done before importing
-it can also speed up the time taken to import as the defaulttz will no longer
-try and do the detection.
-"""
-
-__author__ = "tansell@google.com (Tim Ansell)"
-
-import calendar
-import datetime
-import os
-import os.path
-import re
-import time
-import warnings
-import dateutil.parser
-import dateutil.relativedelta
-import dateutil.tz
-import pytz
-import pytz_abbr
-
-
-try:
-  # pylint: disable-msg=C6204
-  import functools
-except ImportError, e:
-
-  class functools(object):
-    """Fake replacement for a full functools."""
-
-    # pylint: disable-msg=W0613
-    @staticmethod
-    def wraps(f, *args, **kw):
-      return f
-
-
-# Need to patch pytz.utc to have a _utcoffset so you can normalize/localize
-# using it.
-pytz.utc._utcoffset = datetime.timedelta()
-
-
-timedelta = datetime.timedelta
-
-
-def _tzinfome(tzinfo):
-  """Gets a tzinfo object from a string.
-
-  Args:
-    tzinfo: A string (or string like) object, or a datetime.tzinfo object.
-
-  Returns:
-    An datetime.tzinfo object.
-
-  Raises:
-    UnknownTimeZoneError: If the timezone given can't be decoded.
-  """
-  if not isinstance(tzinfo, datetime.tzinfo):
-    try:
-      tzinfo = pytz.timezone(tzinfo)
-    except AttributeError:
-      raise pytz.UnknownTimeZoneError("Unknown timezone! %s" % tzinfo)
-  return tzinfo
-
-
-# Our "local" timezone
-_localtz = None
-
-
-def localtz():
-  """Get the local timezone.
-
-  Returns:
-    The localtime timezone as a tzinfo object.
-  """
-  # pylint: disable-msg=W0603
-  global _localtz
-  if _localtz is None:
-    _localtz = detect_timezone()
-  return _localtz
-
-
-def localtz_set(timezone):
-  """Set the local timezone."""
-  # pylint: disable-msg=W0603
-  global _localtz
-  _localtz = _tzinfome(timezone)
-
-
-def detect_timezone():
-  """Try and detect the timezone that Python is currently running in.
-
-  We have a bunch of different methods for trying to figure this out (listed in
-  order they are attempted).
-    * Try TZ environment variable.
-    * Try and find /etc/timezone file (with timezone name).
-    * Try and find /etc/localtime file (with timezone data).
-    * Try and match a TZ to the current dst/offset/shortname.
-
-  Returns:
-    The detected local timezone as a tzinfo object
-
-  Raises:
-    pytz.UnknownTimeZoneError: If it was unable to detect a timezone.
-  """
-  # First we try the TZ variable
-  tz = _detect_timezone_environ()
-  if tz is not None:
-    return tz
-
-  # Second we try /etc/timezone and use the value in that
-  tz = _detect_timezone_etc_timezone()
-  if tz is not None:
-    return tz
-
-  # Next we try and see if something matches the tzinfo in /etc/localtime
-  tz = _detect_timezone_etc_localtime()
-  if tz is not None:
-    return tz
-
-  # Next we try and use a similiar method to what PHP does.
-  # We first try to search on time.tzname, time.timezone, time.daylight to
-  # match a pytz zone.
-  warnings.warn("Had to fall back to worst detection method (the 'PHP' "
-                "method).")
-
-  tz = _detect_timezone_php()
-  if tz is not None:
-    return tz
-
-  raise pytz.UnknownTimeZoneError("Unable to detect your timezone!")
-
-
-def _detect_timezone_environ():
-  if "TZ" in os.environ:
-    try:
-      return pytz.timezone(os.environ["TZ"])
-    except (IOError, pytz.UnknownTimeZoneError):
-      warnings.warn("You provided a TZ environment value (%r) we did not "
-                    "understand!" % os.environ["TZ"])
-
-
-def _detect_timezone_etc_timezone():
-  if os.path.exists("/etc/timezone"):
-    try:
-      tz = file("/etc/timezone").read().strip()
-      try:
-        return pytz.timezone(tz)
-      except (IOError, pytz.UnknownTimeZoneError), ei:
-        warnings.warn("Your /etc/timezone file references a timezone (%r) that"
-                      " is not valid (%r)." % (tz, ei))
-
-    # Problem reading the /etc/timezone file
-    except IOError, eo:
-      warnings.warn("Could not access your /etc/timezone file: %s" % eo)
-
-
-def _detect_timezone_etc_localtime():
-  matches = []
-  if os.path.exists("/etc/localtime"):
-    localtime = pytz.tzfile.build_tzinfo("/etc/localtime",
-                                         file("/etc/localtime"))
-
-    # See if we can find a "Human Name" for this..
-    for tzname in pytz.all_timezones:
-      tz = _tzinfome(tzname)
-
-      if dir(tz) != dir(localtime):
-        continue
-
-      for attrib in dir(tz):
-        # Ignore functions and specials
-        if callable(getattr(tz, attrib)) or attrib.startswith("__"):
-          continue
-
-        # This will always be different
-        if attrib == "zone" or attrib == "_tzinfos":
-          continue
-
-        if getattr(tz, attrib) != getattr(localtime, attrib):
-          break
-
-      # We get here iff break didn't happen, i.e. no meaningful attributes
-      # differ between tz and localtime
-      else:
-        matches.append(tzname)
-
-    if len(matches) == 1:
-      return _tzinfome(matches[0])
-    else:
-      # Warn the person about this!
-      warning = "Could not get a human name for your timezone: "
-      if len(matches) > 1:
-        warning += ("We detected multiple matches for your /etc/localtime. "
-                    "(Matches where %s)" % matches)
-        return _tzinfome(matches[0])
-      else:
-        warning += "We detected no matches for your /etc/localtime."
-      warnings.warn(warning)
-
-      # Register /etc/localtime as the timezone loaded.
-      pytz._tzinfo_cache['/etc/localtime'] = localtime
-      return localtime
-
-
-def _detect_timezone_php():
-  tomatch = (time.tzname[0], time.timezone, time.daylight)
-  now = datetime.datetime.now()
-
-  matches = []
-  for tzname in pytz.all_timezones:
-    try:
-      tz = pytz.timezone(tzname)
-    except IOError:
-      continue
-
-    try:
-      indst = tz.localize(now).timetuple()[-1]
-
-      if tomatch == (tz._tzname, -tz._utcoffset.seconds, indst):
-        matches.append(tzname)
-
-    # pylint: disable-msg=W0704
-    except AttributeError:
-      pass
-
-  if len(matches) > 1:
-    warnings.warn("We detected multiple matches for the timezone, choosing "
-                  "the first %s. (Matches where %s)" % (matches[0], matches))
-    return pytz.timezone(matches[0])
-
-
-class datetime_tz(datetime.datetime):
-  """An extension of the inbuilt datetime adding more functionality.
-
-  The extra functionality includes:
-    * Partial parsing support (IE 2006/02/30 matches %Y/%M/%D %H:%M)
-    * Full integration with pytz (just give it the string of the timezone!)
-    * Proper support for going to/from Unix timestamps (which are in UTC!).
-  """
-  __slots__ = ["is_dst"]
-
-  def __new__(cls, *args, **kw):
-    args = list(args)
-    if not args:
-      raise TypeError("Not enough arguments given.")
-
-    # See if we are given a tzinfo object...
-    tzinfo = None
-    if isinstance(args[-1], (datetime.tzinfo, basestring)):
-      tzinfo = _tzinfome(args.pop(-1))
-    elif kw.get("tzinfo", None) is not None:
-      tzinfo = _tzinfome(kw.pop("tzinfo"))
-
-    # Create a datetime object if we don't have one
-    if isinstance(args[0], datetime.datetime):
-      # Convert the datetime instance to a datetime object.
-      newargs = (list(args[0].timetuple()[0:6]) +
-                 [args[0].microsecond, args[0].tzinfo])
-      dt = datetime.datetime(*newargs)
-
-      if tzinfo is None and dt.tzinfo is None:
-        raise TypeError("Must specify a timezone!")
-
-      if tzinfo is not None and dt.tzinfo is not None:
-        raise TypeError("Can not give a timezone with timezone aware"
-                        " datetime object! (Use localize.)")
-    else:
-      dt = datetime.datetime(*args, **kw)
-
-    if dt.tzinfo is not None:
-      # Re-normalize the dt object
-      dt = dt.tzinfo.normalize(dt)
-
-    else:
-      if tzinfo is None:
-        tzinfo = localtz()
-
-      try:
-        dt = tzinfo.localize(dt, is_dst=None)
-      except pytz.AmbiguousTimeError:
-        is_dst = None
-        if "is_dst" in kw:
-          is_dst = kw.pop("is_dst")
-
-        try:
-          dt = tzinfo.localize(dt, is_dst)
-        except IndexError:
-          raise pytz.AmbiguousTimeError("No such time exists!")
-
-    newargs = list(dt.timetuple()[0:6])+[dt.microsecond, dt.tzinfo]
-    obj = datetime.datetime.__new__(cls, *newargs)
-    obj.is_dst = obj.dst() != datetime.timedelta(0)
-    return obj
-
-  def asdatetime(self, naive=True):
-    """Return this datetime_tz as a datetime object.
-
-    Args:
-      naive: Return *without* any tz info.
-
-    Returns:
-      This datetime_tz as a datetime object.
-    """
-    args = list(self.timetuple()[0:6])+[self.microsecond]
-    if not naive:
-      args.append(self.tzinfo)
-    return datetime.datetime(*args)
-
-  def asdate(self):
-    """Return this datetime_tz as a date object.
-
-    Returns:
-      This datetime_tz as a date object.
-    """
-    return datetime.date(self.year, self.month, self.day)
-
-  def totimestamp(self):
-    """Convert this datetime object back to a unix timestamp.
-
-    The Unix epoch is the time 00:00:00 UTC on January 1, 1970.
-
-    Returns:
-      Unix timestamp.
-    """
-    return calendar.timegm(self.utctimetuple())+1e-6*self.microsecond
-
-  def astimezone(self, tzinfo):
-    """Returns a version of this timestamp converted to the given timezone.
-
-    Args:
-      tzinfo: Either a datetime.tzinfo object or a string (which will be looked
-              up in pytz.
-
-    Returns:
-      A datetime_tz object in the given timezone.
-    """
-    # Assert we are not a naive datetime object
-    assert self.tzinfo is not None
-
-    tzinfo = _tzinfome(tzinfo)
-
-    d = self.asdatetime(naive=False).astimezone(tzinfo)
-    return datetime_tz(d)
-
-  # pylint: disable-msg=C6113
-  def replace(self, **kw):
-    """Return datetime with new specified fields given as arguments.
-
-    For example, dt.replace(days=4) would return a new datetime_tz object with
-    exactly the same as dt but with the days attribute equal to 4.
-
-    Any attribute can be replaced, but tzinfo can not be set to None.
-
-    Args:
-      Any datetime_tz attribute.
-
-    Returns:
-      A datetime_tz object with the attributes replaced.
-
-    Raises:
-      TypeError: If the given replacement is invalid.
-    """
-    if "tzinfo" in kw:
-      if kw["tzinfo"] is None:
-        raise TypeError("Can not remove the timezone use asdatetime()")
-
-    is_dst = None
-    if "is_dst" in kw:
-      is_dst = kw["is_dst"]
-      del kw["is_dst"]
-    else:
-      # Use our own DST setting..
-      is_dst = self.is_dst
-
-    replaced = self.asdatetime().replace(**kw)
-
-    return datetime_tz(replaced, tzinfo=self.tzinfo.zone, is_dst=is_dst)
-
-  # pylint: disable-msg=C6310
-  @classmethod
-  def smartparse(cls, toparse, tzinfo=None):
-    """Method which uses dateutil.parse and extras to try and parse the string.
-
-    Valid dates are found at:
-     http://labix.org/python-dateutil#head-1443e0f14ad5dff07efd465e080d1110920673d8-2
-
-    Other valid formats include:
-      "now" or "today"
-      "yesterday"
-      "tommorrow"
-      "5 minutes ago"
-      "10 hours ago"
-      "10h5m ago"
-      "start of yesterday"
-      "end of tommorrow"
-      "end of 3rd of March"
-
-    Args:
-      toparse: The string to parse.
-      tzinfo: Timezone for the resultant datetime_tz object should be in.
-              (Defaults to your local timezone.)
-
-    Returns:
-      New datetime_tz object.
-
-    Raises:
-      ValueError: If unable to make sense of the input.
-    """
-    # Default for empty fields are:
-    #   year/month/day == now
-    #   hour/minute/second/microsecond == 0
-    toparse = toparse.strip()
-
-    if tzinfo is None:
-      dt = cls.now()
-    else:
-      dt = cls.now(tzinfo)
-
-    default = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-
-    # Remove "start of " and "end of " prefix in the string
-    if toparse.lower().startswith("end of "):
-      toparse = toparse[7:].strip()
-
-      dt += datetime.timedelta(days=1)
-      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-      dt -= datetime.timedelta(microseconds=1)
-
-      default = dt
-
-    elif toparse.lower().startswith("start of "):
-      toparse = toparse[9:].strip()
-
-      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-      default = dt
-
-    # Handle strings with "now", "today", "yesterday", "tomorrow" and "ago".
-    # Need to use lowercase
-    toparselower = toparse.lower()
-
-    if toparselower in ["now", "today"]:
-      pass
-
-    elif toparselower == "yesterday":
-      dt -= datetime.timedelta(days=1)
-
-    elif toparselower == "tommorrow":
-      dt += datetime.timedelta(days=1)
-
-    elif "ago" in toparselower:
-      # Remove the "ago" bit
-      toparselower = toparselower[:-3]
-      # Replace all "a day and an hour" with "1 day 1 hour"
-      toparselower = toparselower.replace("a ", "1 ")
-      toparselower = toparselower.replace("an ", "1 ")
-      toparselower = toparselower.replace(" and ", " ")
-
-      # Match the following
-      # 1 hour ago
-      # 1h ago
-      # 1 h ago
-      # 1 hour ago
-      # 2 hours ago
-      # Same with minutes, seconds, etc.
-
-      tocheck = ("seconds", "minutes", "hours", "days", "weeks", "months",
-                 "years")
-      result = {}
-      for match in re.finditer("([0-9]+)([^0-9]*)", toparselower):
-        amount = int(match.group(1))
-        unit = match.group(2).strip()
-
-        for bit in tocheck:
-          regex = "^([%s]|((%s)s?))$" % (
-              bit[0], bit[:-1])
-
-          bitmatch = re.search(regex, unit)
-          if bitmatch:
-            result[bit] = amount
-            break
-        else:
-          raise ValueError("Was not able to parse date unit %r!" % unit)
-
-      delta = dateutil.relativedelta.relativedelta(**result)
-      dt -= delta
-
-    else:
-      # Handle strings with normal datetime format, use original case.
-      dt = dateutil.parser.parse(toparse, default=default.asdatetime(),
-                                 tzinfos=pytz_abbr.tzinfos)
-      if dt is None:
-        raise ValueError("Was not able to parse date!")
-
-      if dt.tzinfo is pytz_abbr.unknown:
-        dt = dt.replace(tzinfo=None)
-
-      if dt.tzinfo is None:
-        if tzinfo is None:
-          tzinfo = localtz()
-        dt = cls(dt, tzinfo)
-      else:
-        if isinstance(dt.tzinfo, pytz_abbr.tzabbr):
-          abbr = dt.tzinfo
-          dt = dt.replace(tzinfo=None)
-          dt = cls(dt, abbr.zone, is_dst=abbr.dst)
-
-        dt = cls(dt)
-
-    return dt
-
-  @classmethod
-  def utcfromtimestamp(cls, timestamp):
-    """Returns a datetime object of a given timestamp (in UTC)."""
-    obj = datetime.datetime.utcfromtimestamp(timestamp)
-    obj = pytz.utc.localize(obj)
-    return cls(obj)
-
-  @classmethod
-  def fromtimestamp(cls, timestamp):
-    """Returns a datetime object of a given timestamp (in local tz)."""
-    d = cls.utcfromtimestamp(timestamp)
-    return d.astimezone(localtz())
-
-  @classmethod
-  def utcnow(cls):
-    """Return a new datetime representing UTC day and time."""
-    obj = datetime.datetime.utcnow()
-    obj = cls(obj, tzinfo=pytz.utc)
-    return obj
-
-  @classmethod
-  def now(cls, tzinfo=None):
-    """[tz] -> new datetime with tz's local day and time."""
-    obj = cls.utcnow()
-    if tzinfo is None:
-      tzinfo = localtz()
-    return obj.astimezone(tzinfo)
-
-  today = now
-
-  @staticmethod
-  def fromordinal(ordinal):
-    raise SyntaxError("Not enough information to create a datetime_tz object "
-                      "from an ordinal. Please use datetime.date.fromordinal")
-
-
-class iterate(object):
-  """Helpful iterators for working with datetime_tz objects."""
-
-  @staticmethod
-  def between(start, delta, end=None):
-    """Return an iterator between this date till given end point.
-
-    Example usage:
-      >>> d = datetime_tz.smartparse("5 days ago")
-      2008/05/12 11:45
-      >>> for i in d.between(timedelta(days=1), datetime_tz.now()):
-      >>>    print i
-      2008/05/12 11:45
-      2008/05/13 11:45
-      2008/05/14 11:45
-      2008/05/15 11:45
-      2008/05/16 11:45
-
-    Args:
-      start: The date to start at.
-      delta: The interval to iterate with.
-      end: (Optional) Date to end at. If not given the iterator will never
-           terminate.
-
-    Yields:
-      datetime_tz objects.
-    """
-    toyield = start
-    while end is None or toyield < end:
-      yield toyield
-      toyield += delta
-
-  @staticmethod
-  def weeks(start, end=None):
-    """Iterate over the weeks between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a week apart.
-    """
-    return iterate.between(start, datetime.timedelta(days=7), end)
-
-  @staticmethod
-  def days(start, end=None):
-    """Iterate over the days between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a day apart.
-    """
-    return iterate.between(start, datetime.timedelta(days=1), end)
-
-  @staticmethod
-  def hours(start, end=None):
-    """Iterate over the hours between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a hour apart.
-    """
-    return iterate.between(start, datetime.timedelta(hours=1), end)
-
-  @staticmethod
-  def minutes(start, end=None):
-    """Iterate over the minutes between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a minute apart.
-    """
-    return iterate.between(start, datetime.timedelta(minutes=1), end)
-
-  @staticmethod
-  def seconds(start, end=None):
-    """Iterate over the seconds between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a second apart.
-    """
-    return iterate.between(start, datetime.timedelta(minutes=1), end)
-
-
-def _wrap_method(name):
-  """Wrap a method.
-
-  Patch a method which might return a datetime.datetime to return a
-  datetime_tz.datetime_tz instead.
-
-  Args:
-    name: The name of the method to patch
-  """
-  method = getattr(datetime.datetime, name)
-
-  # Have to give the second argument as method has no __module__ option.
-  @functools.wraps(method, ("__name__", "__doc__"), ())
-  def wrapper(*args, **kw):
-    r = method(*args, **kw)
-
-    if isinstance(r, datetime.datetime) and not isinstance(r, datetime_tz):
-      r = datetime_tz(r)
-    return r
-
-  setattr(datetime_tz, name, wrapper)
-
-for methodname in ["__add__", "__radd__", "__rsub__", "__sub__", "combine"]:
-
-  # Make sure we have not already got an override for this method
-  assert methodname not in datetime_tz.__dict__
-
-  _wrap_method(methodname)
-
-
-__all__ = ['datetime_tz', 'detect_timezone', 'iterate', 'localtz',
-           'localtz_set', 'timedelta', '_detect_timezone_environ',
-           '_detect_timezone_etc_localtime', '_detect_timezone_etc_timezone',
-           '_detect_timezone_php']
@@ -1,230 +0,0 @@
-#!/usr/bin/python2.4
-# -*- coding: utf-8 -*-
-#
-# Copyright 2010 Google Inc. All Rights Reserved.
-#
-
-"""
-Common time zone acronyms/abbreviations for use with the datetime_tz module.
-
-*WARNING*: There are lots of caveats when using this module which are listed
-below.
-
-CAVEAT 1: The acronyms/abbreviations are not globally unique, they are not even
-unique within a region. For example, EST can mean any of,
-  Eastern Standard Time in Australia (which is 10 hour ahead of UTC)
-  Eastern Standard Time in North America (which is 5 hours behind UTC)
-
-Where there are two abbreviations the more popular one will appear in the all
-dictionary, while the less common one will only appear in that countries region
-dictionary. IE If using all, EST will be mapped to Eastern Standard Time in
-North America.
-
-CAVEAT 2: Many of the acronyms don't map to a neat Oslon timezones. For example,
-Eastern European Summer Time (EEDT) is used by many different countries in
-Europe *at different times*! If the acronym does not map neatly to one zone it
-is mapped to the Etc/GMT+-XX Oslon zone. This means that any date manipulations
-can end up with idiot things like summer time in the middle of winter.
-
-CAVEAT 3: The Summer/Standard time difference is really important! For an hour
-each year it is needed to determine which time you are actually talking about.
-    2002-10-27 01:20:00 EST != 2002-10-27 01:20:00 EDT
-"""
-
-import datetime
-import pytz
-import pytz.tzfile
-
-
-class tzabbr(datetime.tzinfo):
-  """A timezone abbreviation.
-
-  *WARNING*: This is not a tzinfo implementation! Trying to use this as tzinfo
-  object will result in failure.  We inherit from datetime.tzinfo so we can get
-  through the dateutil checks.
-  """
-  pass
-
-
-# A "marker" tzinfo object which is used to signify an unknown timezone.
-unknown = datetime.tzinfo(0)
-
-
-regions = {'all': {}, 'military': {}}
-# Create a special alias for the all and military regions
-all = regions['all']
-military = regions['military']
-
-
-def tzabbr_register(abbr, name, region, zone, dst):
-  """Register a new timezone abbreviation in the global registry.
-
-  If another abbreviation with the same name has already been registered it new
-  abbreviation will only be registered in region specific dictionary.
-  """
-  newabbr = tzabbr()
-  newabbr.abbr = abbr
-  newabbr.name = name
-  newabbr.region = region
-  newabbr.zone = zone
-  newabbr.dst = dst
-
-  if abbr not in all:
-    all[abbr] = newabbr
-
-  if not region in regions:
-    regions[region] = {}
-
-  assert abbr not in regions[region]
-  regions[region][abbr] = newabbr
-
-
-def tzinfos_create(use_region):
-  abbrs = regions[use_region]
-
-  def tzinfos(abbr, offset):
-    if abbr:
-      if abbr in abbrs:
-        result = abbrs[abbr]
-        if offset:
-          # FIXME: Check the offset matches the abbreviation we just selected.
-          pass
-        return result
-      else:
-        raise ValueError, "Unknown timezone found %s" % abbr
-    if offset == 0:
-      return pytz.utc
-    if offset:
-      return pytz.FixedOffset(offset/60)
-    return unknown
-
-  return tzinfos
-
-
-# Create a special alias for the all tzinfos
-tzinfos = tzinfos_create('all')
-
-
-# Create the abbreviations.
-# *WARNING*: Order matters!
-tzabbr_register("A", u"Alpha Time Zone", u"Military", "Etc/GMT-1", False)
-tzabbr_register("ACDT", u"Australian Central Daylight Time", u"Australia",
-                "Australia/Adelaide", True)
-tzabbr_register("ACST", u"Australian Central Standard Time", u"Australia",
-                "Australia/Adelaide", False)
-tzabbr_register("ADT", u"Atlantic Daylight Time", u"North America",
-                "America/Halifax", True)
-tzabbr_register("AEDT", u"Australian Eastern Daylight Time", u"Australia",
-                "Australia/Sydney", True)
-tzabbr_register("AEST", u"Australian Eastern Standard Time", u"Australia",
-                "Australia/Sydney", False)
-tzabbr_register("AKDT", u"Alaska Daylight Time", u"North America",
-                "US/Alaska", True)
-tzabbr_register("AKST", u"Alaska Standard Time", u"North America",
-                "US/Alaska", False)
-tzabbr_register("AST", u"Atlantic Standard Time", u"North America",
-                "America/Halifax", False)
-tzabbr_register("AWDT", u"Australian Western Daylight Time", u"Australia",
-                "Australia/West", True)
-tzabbr_register("AWST", u"Australian Western Standard Time", u"Australia",
-                "Australia/West", False)
-tzabbr_register("B", u"Bravo Time Zone", u"Military", "Etc/GMT-2", False)
-tzabbr_register("BST", u"British Summer Time", u"Europe", "Europe/London", True)
-tzabbr_register("C", u"Charlie Time Zone", u"Military", "Etc/GMT-2", False)
-tzabbr_register("CDT", u"Central Daylight Time", u"North America",
-                "US/Central", True)
-tzabbr_register("CEDT", u"Central European Daylight Time", u"Europe",
-                "Etc/GMT+2", True)
-tzabbr_register("CEST", u"Central European Summer Time", u"Europe",
-                "Etc/GMT+2", True)
-tzabbr_register("CET", u"Central European Time", u"Europe", "Etc/GMT+1", False)
-tzabbr_register("CST", u"Central Standard Time", u"North America",
-                "US/Central", False)
-tzabbr_register("CXT", u"Christmas Island Time", u"Australia",
-                "Indian/Christmas", False)
-tzabbr_register("D", u"Delta Time Zone", u"Military", "Etc/GMT-2", False)
-tzabbr_register("E", u"Echo Time Zone", u"Military", "Etc/GMT-2", False)
-tzabbr_register("EDT", u"Eastern Daylight Time", u"North America",
-                "US/Eastern", True)
-tzabbr_register("EEDT", u"Eastern European Daylight Time", u"Europe",
-                "Etc/GMT+3", True)
-tzabbr_register("EEST", u"Eastern European Summer Time", u"Europe",
-                "Etc/GMT+3", True)
-tzabbr_register("EET", u"Eastern European Time", u"Europe", "Etc/GMT+2", False)
-tzabbr_register("EST", u"Eastern Standard Time", u"North America",
-                "US/Eastern", False)
-tzabbr_register("F", u"Foxtrot Time Zone", u"Military", "Etc/GMT-6", False)
-tzabbr_register("G", u"Golf Time Zone", u"Military", "Etc/GMT-7", False)
-tzabbr_register("GMT", u"Greenwich Mean Time", u"Europe", pytz.utc, False)
-tzabbr_register("H", u"Hotel Time Zone", u"Military", "Etc/GMT-8", False)
-#tzabbr_register("HAA", u"Heure Avancée de l'Atlantique", u"North America", u"UTC - 3 hours")
-#tzabbr_register("HAC", u"Heure Avancée du Centre", u"North America", u"UTC - 5 hours")
-tzabbr_register("HADT", u"Hawaii-Aleutian Daylight Time", u"North America",
-                "Pacific/Honolulu", True)
-#tzabbr_register("HAE", u"Heure Avancée de l'Est", u"North America", u"UTC - 4 hours")
-#tzabbr_register("HAP", u"Heure Avancée du Pacifique", u"North America", u"UTC - 7 hours")
-#tzabbr_register("HAR", u"Heure Avancée des Rocheuses", u"North America", u"UTC - 6 hours")
-tzabbr_register("HAST", u"Hawaii-Aleutian Standard Time", u"North America",
-                "Pacific/Honolulu", False)
-#tzabbr_register("HAT", u"Heure Avancée de Terre-Neuve", u"North America", u"UTC - 2:30 hours")
-#tzabbr_register("HAY", u"Heure Avancée du Yukon", u"North America", u"UTC - 8 hours")
-tzabbr_register("HDT", u"Hawaii Daylight Time", u"North America",
-                "Pacific/Honolulu", True)
-#tzabbr_register("HNA", u"Heure Normale de l'Atlantique", u"North America", u"UTC - 4 hours")
-#tzabbr_register("HNC", u"Heure Normale du Centre", u"North America", u"UTC - 6 hours")
-#tzabbr_register("HNE", u"Heure Normale de l'Est", u"North America", u"UTC - 5 hours")
-#tzabbr_register("HNP", u"Heure Normale du Pacifique", u"North America", u"UTC - 8 hours")
-#tzabbr_register("HNR", u"Heure Normale des Rocheuses", u"North America", u"UTC - 7 hours")
-#tzabbr_register("HNT", u"Heure Normale de Terre-Neuve", u"North America", u"UTC - 3:30 hours")
-#tzabbr_register("HNY", u"Heure Normale du Yukon", u"North America", u"UTC - 9 hours")
-tzabbr_register("HST", u"Hawaii Standard Time", u"North America",
-                "Pacific/Honolulu", False)
-tzabbr_register("I", u"India Time Zone", u"Military", "Etc/GMT-9", False)
-tzabbr_register("IST", u"Irish Summer Time", u"Europe", "Europe/Dublin", True)
-tzabbr_register("K", u"Kilo Time Zone", u"Military", "Etc/GMT-10", False)
-tzabbr_register("L", u"Lima Time Zone", u"Military", "Etc/GMT-11", False)
-tzabbr_register("M", u"Mike Time Zone", u"Military", "Etc/GMT-12", False)
-tzabbr_register("MDT", u"Mountain Daylight Time", u"North America",
-                "US/Mountain", True)
-#tzabbr_register("MESZ", u"Mitteleuroäische Sommerzeit", u"Europe", u"UTC + 2 hours")
-#tzabbr_register("MEZ", u"Mitteleuropäische Zeit", u"Europe", u"UTC + 1 hour")
-tzabbr_register("MSD", u"Moscow Daylight Time", u"Europe",
-                "Europe/Moscow", True)
-tzabbr_register("MSK", u"Moscow Standard Time", u"Europe",
-                "Europe/Moscow", False)
-tzabbr_register("MST", u"Mountain Standard Time", u"North America",
-                "US/Mountain", False)
-tzabbr_register("N", u"November Time Zone", u"Military", "Etc/GMT+1", False)
-tzabbr_register("NDT", u"Newfoundland Daylight Time", u"North America",
-                "America/St_Johns", True)
-tzabbr_register("NFT", u"Norfolk (Island) Time", u"Australia",
-                "Pacific/Norfolk", False)
-tzabbr_register("NST", u"Newfoundland Standard Time", u"North America",
-                "America/St_Johns", False)
-tzabbr_register("O", u"Oscar Time Zone", u"Military", "Etc/GMT+2", False)
-tzabbr_register("P", u"Papa Time Zone", u"Military", "Etc/GMT+3", False)
-tzabbr_register("PDT", u"Pacific Daylight Time", u"North America",
-                "US/Pacific", True)
-tzabbr_register("PST", u"Pacific Standard Time", u"North America",
-                "US/Pacific", False)
-tzabbr_register("Q", u"Quebec Time Zone", u"Military", "Etc/GMT+4", False)
|
|
||||||
tzabbr_register("R", u"Romeo Time Zone", u"Military", "Etc/GMT+5", False)
|
|
||||||
tzabbr_register("S", u"Sierra Time Zone", u"Military", "Etc/GMT+6", False)
|
|
||||||
tzabbr_register("T", u"Tango Time Zone", u"Military", "Etc/GMT+7", False)
|
|
||||||
tzabbr_register("U", u"Uniform Time Zone", u"Military", "Etc/GMT+8", False)
|
|
||||||
tzabbr_register("UTC", u"Coordinated Universal Time", u"Europe",
|
|
||||||
pytz.utc, False)
|
|
||||||
tzabbr_register("V", u"Victor Time Zone", u"Military", "Etc/GMT+9", False)
|
|
||||||
tzabbr_register("W", u"Whiskey Time Zone", u"Military", "Etc/GMT+10", False)
|
|
||||||
tzabbr_register("WDT", u"Western Daylight Time", u"Australia",
|
|
||||||
"Australia/West", True)
|
|
||||||
tzabbr_register("WEDT", u"Western European Daylight Time", u"Europe",
|
|
||||||
"Etc/GMT+1", True)
|
|
||||||
tzabbr_register("WEST", u"Western European Summer Time", u"Europe",
|
|
||||||
"Etc/GMT+1", True)
|
|
||||||
tzabbr_register("WET", u"Western European Time", u"Europe", pytz.utc, False)
|
|
||||||
tzabbr_register("WST", u"Western Standard Time", u"Australia",
|
|
||||||
"Australia/West", False)
|
|
||||||
tzabbr_register("X", u"X-ray Time Zone", u"Military", "Etc/GMT+11", False)
|
|
||||||
tzabbr_register("Y", u"Yankee Time Zone", u"Military", "Etc/GMT+12", False)
|
|
||||||
tzabbr_register("Z", u"Zulu Time Zone", u"Military", pytz.utc, False)
|
|
||||||
@@ -1,25 +1,36 @@
 import os
+import errno
 from math import log


 def human_size(num):
     """Human friendly file size"""
-    unit_list = zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'], [0, 0, 1, 2, 2])
-    if num > 1:
-        exponent = min(int(log(num, 1024)), len(unit_list) - 1)
-        quotient = float(num) / 1024**exponent
-        unit, num_decimals = unit_list[exponent]
-        format_string = '{:.%sf} {}' % (num_decimals)
-        return format_string.format(quotient, unit)
-    if num == 0: # pragma: no cover
-        return '0 bytes'
-    if num == 1: # pragma: no cover
-        return '1 byte'
+    unit_list = list(zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'],
+                         [0, 0, 1, 2, 2]))
+    if num == 0:
+        return '0 bytes'
+    if num == 1:
+        return '1 byte'
+    exponent = min(int(log(num, 1024)), len(unit_list) - 1)
+    quotient = float(num) / 1024**exponent
+    unit, num_decimals = unit_list[exponent]
+    format_string = '{:.%sf} {}' % (num_decimals)
+    return format_string.format(quotient, unit)


 def du(path):
-    """Like du -sb, returns total size of path in bytes."""
-    size = os.path.getsize(path)
-    if os.path.isdir(path):
-        for thisfile in os.listdir(path):
-            filepath = os.path.join(path, thisfile)
-            size += du(filepath)
-    return size
+    """Like du -sb, returns total size of path in bytes.  Ignore
+    errors that might occur if we encounter broken symlinks or
+    files in the process of being removed."""
+    try:
+        st = os.stat(path)
+        size = st.st_blocks * 512
+        if os.path.isdir(path):
+            for thisfile in os.listdir(path):
+                filepath = os.path.join(path, thisfile)
+                size += du(filepath)
+        return size
+    except OSError as e:
+        if e.errno != errno.ENOENT:
+            raise
+        return 0
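A quick illustration of the reworked helpers (a sketch only; it assumes human_size() and du() above are in scope, and the sample path is arbitrary):

    print(human_size(0))           # -> "0 bytes"
    print(human_size(1048576))     # -> "1.0 MiB" (exponent 2, one decimal for MiB)
    print(human_size(du("/tmp")))  # recursive on-disk size of /tmp, block-based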
@@ -1,49 +1,20 @@
 # Implementation of hole punching via fallocate, if the OS
 # and filesystem support it.

-try:
-    import os
-    import ctypes
-    import ctypes.util
-
-    def make_fallocate():
-        libc_name = ctypes.util.find_library('c')
-        libc = ctypes.CDLL(libc_name, use_errno=True)
-
-        _fallocate = libc.fallocate
-        _fallocate.restype = ctypes.c_int
-        _fallocate.argtypes = [ ctypes.c_int, ctypes.c_int,
-                                ctypes.c_int64, ctypes.c_int64 ]
-
-        del libc
-        del libc_name
-
-        def fallocate(fd, mode, offset, len_):
-            res = _fallocate(fd, mode, offset, len_)
-            if res != 0: # pragma: no cover
-                errno = ctypes.get_errno()
-                raise IOError(errno, os.strerror(errno))
-        return fallocate
-
-    fallocate = make_fallocate()
-    del make_fallocate
-except Exception: # pragma: no cover
-    fallocate = None
-
-FALLOC_FL_KEEP_SIZE = 0x01
-FALLOC_FL_PUNCH_HOLE = 0x02
-
-def punch_hole(filename, offset, length, ignore_errors = True):
+import fallocate
+
+
+def punch_hole(filename, offset, length, ignore_errors=True):
     """Punch a hole in the file.  This isn't well supported, so errors
     are ignored by default."""
     try:
-        if fallocate is None: # pragma: no cover
-            raise IOError("fallocate not available")
         with open(filename, "r+") as f:
-            fallocate(f.fileno(),
-                      FALLOC_FL_KEEP_SIZE | FALLOC_FL_PUNCH_HOLE,
-                      offset, length)
-    except IOError: # pragma: no cover
+            fallocate.fallocate(
+                f.fileno(),
+                offset,
+                length,
+                fallocate.FALLOC_FL_KEEP_SIZE | fallocate.FALLOC_FL_PUNCH_HOLE)
+    except Exception:
         if ignore_errors:
             return
         raise
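A hypothetical usage sketch; the file name is made up, and this needs the third-party fallocate package (now in requirements.txt) plus a filesystem that supports hole punching, such as ext4 or xfs:

    with open("scratch.dat", "wb") as f:
        f.write(b"\xff" * 65536)           # 64 kiB of data
    punch_hole("scratch.dat", 4096, 8192)  # deallocate 8 kiB; errors are ignored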
nilmdb/utils/interval.py (new file, 168 lines)
@@ -0,0 +1,168 @@
+"""Interval.  Like nilmdb.server.interval, but re-implemented here
+in plain Python so clients have easier access to it, and with a few
+helper functions.
+
+Intervals are half-open, ie. they include data points with timestamps
+[start, end)
+"""
+
+import nilmdb.utils.time
+import nilmdb.utils.iterator
+
+
+class IntervalError(Exception):
+    """Error due to interval overlap, etc"""
+    pass
+
+
+# Interval
+class Interval:
+    """Represents an interval of time."""
+
+    def __init__(self, start, end):
+        """
+        'start' and 'end' are arbitrary numbers that represent time
+        """
+        if start >= end:
+            # Explicitly disallow zero-width intervals, since they're half-open
+            raise IntervalError("start %s must precede end %s" % (start, end))
+        self.start = start
+        self.end = end
+
+    def __repr__(self):
+        s = repr(self.start) + ", " + repr(self.end)
+        return self.__class__.__name__ + "(" + s + ")"
+
+    def __str__(self):
+        return ("[" + nilmdb.utils.time.timestamp_to_string(self.start) +
+                " -> " + nilmdb.utils.time.timestamp_to_string(self.end) + ")")
+
+    def human_string(self):
+        return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
+                " -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")
+
+    # Compare two intervals.  If non-equal, order by start then end
+    def __lt__(self, other):
+        return (self.start, self.end) < (other.start, other.end)
+
+    def __gt__(self, other):
+        return (self.start, self.end) > (other.start, other.end)
+
+    def __le__(self, other):
+        return (self.start, self.end) <= (other.start, other.end)
+
+    def __ge__(self, other):
+        return (self.start, self.end) >= (other.start, other.end)
+
+    def __eq__(self, other):
+        return (self.start, self.end) == (other.start, other.end)
+
+    def __ne__(self, other):
+        return (self.start, self.end) != (other.start, other.end)
+
+    def intersects(self, other):
+        """Return True if two Interval objects intersect"""
+        if not isinstance(other, Interval):
+            raise TypeError("need an Interval")
+        if self.end <= other.start or self.start >= other.end:
+            return False
+        return True
+
+    def subset(self, start, end):
+        """Return a new Interval that is a subset of this one"""
+        # A subclass that tracks additional data might override this.
+        if start < self.start or end > self.end:
+            raise IntervalError("not a subset")
+        return Interval(start, end)
+
+
+def _interval_math_helper(a, b, op, subset=True):
+    """Helper for set_difference, intersection functions,
+    to compute interval subsets based on a math operator on ranges
+    present in A and B.  Subsets are computed from A, or new intervals
+    are generated if subset = False."""
+    # Iterate through all starts and ends in sorted order.  Add a
+    # tag to the iterator so that we can figure out which one they
+    # were, after sorting.
+    def decorate(it, key_start, key_end):
+        for i in it:
+            yield i.start, key_start, i
+            yield i.end, key_end, i
+    a_iter = decorate(iter(a), 0, 2)
+    b_iter = decorate(iter(b), 1, 3)
+
+    # Now iterate over the timestamps of each start and end.
+    # At each point, evaluate which type of end it is, to determine
+    # how to build up the output intervals.
+    a_interval = None
+    in_a = False
+    in_b = False
+    out_start = None
+    for (ts, k, i) in nilmdb.utils.iterator.imerge(a_iter, b_iter):
+        if k == 0:
+            a_interval = i
+            in_a = True
+        elif k == 1:
+            in_b = True
+        elif k == 2:
+            in_a = False
+        else:  # k == 3
+            in_b = False
+        include = op(in_a, in_b)
+        if include and out_start is None:
+            out_start = ts
+        elif not include:
+            if out_start is not None and out_start != ts:
+                if subset:
+                    yield a_interval.subset(out_start, ts)
+                else:
+                    yield Interval(out_start, ts)
+            out_start = None
+
+
+def set_difference(a, b):
+    """
+    Compute the difference (a \\ b) between the intervals in 'a' and
+    the intervals in 'b'; i.e., the ranges that are present in 'a'
+    but not 'b'.
+
+    'a' and 'b' must both be iterables.
+
+    Returns a generator that yields each interval in turn.
+    Output intervals are built as subsets of the intervals in the
+    first argument (a).
+    """
+    return _interval_math_helper(a, b, (lambda a, b: a and not b))
+
+
+def intersection(a, b):
+    """
+    Compute the intersection between the intervals in 'a' and the
+    intervals in 'b'; i.e., the ranges that are present in both 'a'
+    and 'b'.
+
+    'a' and 'b' must both be iterables.
+
+    Returns a generator that yields each interval in turn.
+    Output intervals are built as subsets of the intervals in the
+    first argument (a).
+    """
+    return _interval_math_helper(a, b, (lambda a, b: a and b))
+
+
+def optimize(it):
+    """
+    Given an iterable 'it' with intervals, optimize them by joining
+    together intervals that are adjacent in time, and return a generator
+    that yields the new intervals.
+    """
+    saved_int = None
+    for interval in it:
+        if saved_int is not None:
+            if saved_int.end == interval.start:
+                interval.start = saved_int.start
+            else:
+                yield saved_int
+        saved_int = interval
+    if saved_int is not None:
+        yield saved_int
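A short sketch of the new module in use; values are arbitrary, and both set operations yield generators built from subsets of the first argument:

    from nilmdb.utils.interval import (Interval, intersection,
                                       set_difference, optimize)

    a = [Interval(0, 10)]
    b = [Interval(5, 15)]
    print(list(intersection(a, b)))    # [Interval(5, 10)]
    print(list(set_difference(a, b)))  # [Interval(0, 5)]
    print(list(optimize(iter([Interval(0, 10), Interval(10, 20)]))))
    # [Interval(0, 20)] -- adjacent intervals are joined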
nilmdb/utils/iterator.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+# Misc iterator tools
+
+# Iterator merging, based on http://code.activestate.com/recipes/491285/
+import heapq
+
+
+def imerge(*iterables):
+    '''Merge multiple sorted inputs into a single sorted output.
+
+    Equivalent to:  sorted(itertools.chain(*iterables))
+
+    >>> list(imerge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
+    [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
+
+    '''
+    heappop, siftup, _Stop = heapq.heappop, heapq._siftup, StopIteration
+
+    h = []
+    h_append = h.append
+    for it in map(iter, iterables):
+        try:
+            nexter = it.__next__
+            h_append([nexter(), nexter])
+        except _Stop:
+            pass
+    heapq.heapify(h)
+
+    while 1:
+        try:
+            while 1:
+                v, nexter = s = h[0]   # raises IndexError when h is empty
+                yield v
+                s[0] = nexter()        # raises StopIteration when exhausted
+                siftup(h, 0)           # restore heap condition
+        except _Stop:
+            heappop(h)                 # remove empty iterator
+        except IndexError:
+            return
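Besides the doctest, this is the primitive that interval.py's _interval_math_helper builds on; a minimal sketch:

    from nilmdb.utils.iterator import imerge

    print(list(imerge([1, 4, 7], [2, 5, 8], [3, 6, 9])))
    # [1, 2, 3, 4, 5, 6, 7, 8, 9]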
(file removed)
@@ -1,100 +0,0 @@
-import Queue
-import threading
-import sys
-import contextlib
-
-# This file provides a context manager that converts a function
-# that takes a callback into a generator that returns an iterable.
-# This is done by running the function in a new thread.
-
-# Based partially on http://stackoverflow.com/questions/9968592/
-
-class IteratorizerThread(threading.Thread):
-    def __init__(self, queue, function, curl_hack):
-        """
-        function: function to execute, which takes the
-        callback (provided by this class) as an argument
-        """
-        threading.Thread.__init__(self)
-        self.name = "Iteratorizer-" + function.__name__ + "-" + self.name
-        self.function = function
-        self.queue = queue
-        self.die = False
-        self.curl_hack = curl_hack
-
-    def callback(self, data):
-        try:
-            if self.die:
-                raise Exception() # trigger termination
-            self.queue.put((1, data))
-        except:
-            if self.curl_hack:
-                # We can't raise exceptions, because the pycurl
-                # extension module will unconditionally print the
-                # exception itself, and not pass it up to the caller.
-                # Instead, just return a value that tells curl to
-                # abort.  (-1 would be best, in case we were given 0
-                # bytes, but the extension doesn't support that).
-                self.queue.put((2, sys.exc_info()))
-                return 0
-            raise
-
-    def run(self):
-        try:
-            result = self.function(self.callback)
-        except:
-            self.queue.put((2, sys.exc_info()))
-        else:
-            self.queue.put((0, result))
-
-@contextlib.contextmanager
-def Iteratorizer(function, curl_hack = False):
-    """
-    Context manager that takes a function expecting a callback,
-    and provides an iterable that yields the values passed to that
-    callback instead.
-
-    function: function to execute, which takes a callback
-    (provided by this context manager) as an argument
-
-    with iteratorizer(func) as it:
-        for i in it:
-            print 'callback was passed:', i
-        print 'function returned:', it.retval
-    """
-    queue = Queue.Queue(maxsize = 1)
-    thread = IteratorizerThread(queue, function, curl_hack)
-    thread.daemon = True
-    thread.start()
-
-    class iteratorizer_gen(object):
-        def __init__(self, queue):
-            self.queue = queue
-            self.retval = None
-
-        def __iter__(self):
-            return self
-
-        def next(self):
-            (typ, data) = self.queue.get()
-            if typ == 0:
-                # function has returned
-                self.retval = data
-                raise StopIteration
-            elif typ == 1:
-                # data is available
-                return data
-            else:
-                # callback raised an exception
-                raise data[0], data[1], data[2]
-
-    try:
-        yield iteratorizer_gen(queue)
-    finally:
-        # Ask the thread to die, if it's still running.
-        thread.die = True
-        while thread.isAlive():
-            try:
-                queue.get(True, 0.01)
-            except: # pragma: no cover
-                pass
nilmdb/utils/lock.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+# File locking
+
+import fcntl
+import errno
+
+
+def exclusive_lock(f):
+    """Acquire an exclusive lock.  Returns True on successful
+    lock, or False on error."""
+    try:
+        fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+    except IOError as e:
+        if e.errno in (errno.EACCES, errno.EAGAIN):
+            return False
+        else:
+            raise
+    return True
+
+
+def exclusive_unlock(f):
+    """Release an exclusive lock."""
+    fcntl.flock(f.fileno(), fcntl.LOCK_UN)
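A usage sketch (the lock-file path is hypothetical):

    from nilmdb.utils.lock import exclusive_lock, exclusive_unlock

    f = open("/tmp/example.lock", "w")
    if exclusive_lock(f):
        try:
            pass  # ... do work while holding the lock ...
        finally:
            exclusive_unlock(f)
    else:
        print("already locked by another process")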
@@ -6,10 +6,11 @@
 import collections
 import decorator

-def lru_cache(size = 10, onremove = None, keys = slice(None)):
+
+def lru_cache(size=10, onremove=None, keys=slice(None)):
     """Least-recently-used cache decorator.

-    @lru_cache(size = 10, onevict = None)
+    @lru_cache(size=10, onremove=None)
     def f(...):
         pass

@@ -26,7 +27,7 @@ def lru_cache(size = 10, onremove = None, keys = slice(None)):
     """

     def decorate(func):
-        cache = collections.OrderedDict() # order: least- to most-recent
+        cache = collections.OrderedDict()  # order: least- to most-recent

         def evict(value):
             if onremove:
@@ -43,8 +44,8 @@ def lru_cache(size = 10, onremove = None, keys = slice(None)):
             value = orig(*args)
             orig.cache_misses += 1
             if len(cache) >= size:
-                evict(cache.popitem(0)[1]) # evict LRU cache entry
-            cache[key] = value # (re-)insert this key at end
+                evict(cache.popitem(0)[1])  # evict LRU cache entry
+            cache[key] = value  # (re-)insert this key at end
             return value

         def cache_remove(*args):
@@ -53,14 +54,17 @@ def lru_cache(size = 10, onremove = None, keys = slice(None)):
             if key in cache:
                 evict(cache.pop(key))
             else:
-                if len(cache) > 0 and len(args) != len(cache.iterkeys().next()):
-                    raise KeyError("trying to remove from LRU cache, but "
-                                   "number of arguments doesn't match the "
-                                   "cache key length")
+                if cache:
+                    if len(args) != len(next(iter(cache.keys()))):
+                        raise KeyError("trying to remove from LRU cache, but "
+                                       "number of arguments doesn't match the "
+                                       "cache key length")

         def cache_remove_all():
+            nonlocal cache
             for key in cache:
-                evict(cache.pop(key))
+                evict(cache[key])
+            cache = collections.OrderedDict()

         def cache_info():
             return (func.cache_hits, func.cache_misses)
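A usage sketch of the decorator; the import path is an assumption here, since this hunk doesn't show the file name:

    from nilmdb.utils.lrucache import lru_cache  # module path assumed

    @lru_cache(size=2)
    def square(x):
        print("computing", x)
        return x * x

    square(2)             # prints "computing 2" and caches the result
    square(2)             # served from the cache
    square(3); square(4)  # size=2, so the entry for 2 is evicted
    square(2)             # recomputed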
@@ -1,9 +1,10 @@
-from nilmdb.utils.printf import *
 import sys
 import inspect
 import decorator
+from nilmdb.utils.printf import fprintf

-def must_close(errorfile = sys.stderr, wrap_verify = False):
+
+def must_close(errorfile=sys.stderr, wrap_verify=False):
     """Class decorator that warns on 'errorfile' at deletion time if
     the class's close() member wasn't called.

@@ -12,12 +13,17 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
     already been called."""
     def class_decorator(cls):

+        def is_method_or_function(x):
+            return inspect.ismethod(x) or inspect.isfunction(x)
+
         def wrap_class_method(wrapper):
             try:
-                orig = getattr(cls, wrapper.__name__).im_func
-            except:
+                orig = getattr(cls, wrapper.__name__)
+            except AttributeError:
                 orig = lambda x: None
-            setattr(cls, wrapper.__name__, decorator.decorator(wrapper, orig))
+            if is_method_or_function(orig):
+                setattr(cls, wrapper.__name__,
+                        decorator.decorator(wrapper, orig))

         @wrap_class_method
         def __init__(orig, self, *args, **kwargs):
@@ -28,10 +34,13 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):

         @wrap_class_method
         def __del__(orig, self, *args, **kwargs):
-            if "_must_close" in self.__dict__:
-                fprintf(errorfile, "error: %s.close() wasn't called!\n",
-                        self.__class__.__name__)
-            return orig(self, *args, **kwargs)
+            try:
+                if "_must_close" in self.__dict__:
+                    fprintf(errorfile, "error: %s.close() wasn't called!\n",
+                            self.__class__.__name__)
+                return orig(self, *args, **kwargs)
+            except:
+                pass

         @wrap_class_method
         def close(orig, self, *args, **kwargs):
@@ -42,20 +51,21 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
         # Optionally wrap all other functions
         def verifier(orig, self, *args, **kwargs):
             if ("_must_close" not in self.__dict__ and
                 "_must_close_initialized" in self.__dict__):
                 raise AssertionError("called " + str(orig) + " after close")
             return orig(self, *args, **kwargs)
         if wrap_verify:
-            for (name, method) in inspect.getmembers(cls, inspect.ismethod):
-                # Skip class methods
-                if method.__self__ is not None:
-                    continue
+            for (name, method) in inspect.getmembers(cls,
+                                                     is_method_or_function):
                 # Skip some methods
-                if name in [ "__del__", "__init__" ]:
+                if name in ["__del__", "__init__"]:
                     continue
                 # Set up wrapper
-                setattr(cls, name, decorator.decorator(verifier,
-                                                       method.im_func))
+                if inspect.ismethod(method):
+                    func = method.__func__
+                else:
+                    func = method
+                setattr(cls, name, decorator.decorator(verifier, func))

         return cls
     return class_decorator
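A usage sketch, assuming the decorator above is in scope (its module path isn't shown in this extract):

    @must_close()
    class Resource():
        def close(self):
            pass  # cleanup goes here

    r = Resource()
    r.close()  # if omitted, "error: Resource.close() wasn't called!"
               # is printed to errorfile when the object is deleted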
@@ -1,9 +1,13 @@
 """printf, fprintf, sprintf"""

-from __future__ import print_function
+
 def printf(_str, *args):
     print(_str % args, end='')

+
 def fprintf(_file, _str, *args):
     print(_str % args, end='', file=_file)

+
 def sprintf(_str, *args):
     return (_str % args)
@@ -1,10 +1,6 @@
-import Queue
+import queue
 import threading
 import sys
-import decorator
-import inspect
-import types
-import functools

 # This file provides a class that will wrap an object and serialize
 # all calls to its methods.  All calls to that object will be queued
@@ -13,6 +9,7 @@

 # Based partially on http://stackoverflow.com/questions/2642515/

+
 class SerializerThread(threading.Thread):
     """Thread that retrieves call information from the queue, makes the
     call, and returns the results."""
@@ -30,7 +27,7 @@ class SerializerThread(threading.Thread):
             exception = None
             result = None
             try:
-                result = func(*args, **kwargs) # wrapped
+                result = func(*args, **kwargs)  # wrapped
             except:
                 exception = sys.exc_info()
             # Ensure we delete these before returning a result, so
@@ -40,6 +37,7 @@ class SerializerThread(threading.Thread):
             result_queue.put((exception, result))
             del exception, result

+
 def serializer_proxy(obj_or_type):
     """Wrap the given object or type in a SerializerObjectProxy.

@@ -49,61 +47,88 @@ def serializer_proxy(obj_or_type):
     The proxied requests, including instantiation, are performed in a
     single thread and serialized between caller threads.
     """
-    class SerializerCallProxy(object):
+    class SerializerCallProxy():
         def __init__(self, call_queue, func, objectproxy):
             self.call_queue = call_queue
             self.func = func
             # Need to hold a reference to object proxy so it doesn't
             # go away (and kill the thread) until after get called.
             self.objectproxy = objectproxy

         def __call__(self, *args, **kwargs):
-            result_queue = Queue.Queue()
+            result_queue = queue.Queue()
             self.call_queue.put((result_queue, self.func, args, kwargs))
-            ( exc_info, result ) = result_queue.get()
+            (exc_info, result) = result_queue.get()
             if exc_info is None:
                 return result
             else:
-                raise exc_info[0], exc_info[1], exc_info[2]
+                raise exc_info[1].with_traceback(exc_info[2])

-    class SerializerObjectProxy(object):
+    class SerializerObjectProxy():
         def __init__(self, obj_or_type, *args, **kwargs):
             self.__object = obj_or_type
-            try:
-                if type(obj_or_type) in (types.TypeType, types.ClassType):
-                    classname = obj_or_type.__name__
-                else:
-                    classname = obj_or_type.__class__.__name__
-            except AttributeError: # pragma: no cover
-                classname = "???"
-            self.__call_queue = Queue.Queue()
+            if isinstance(obj_or_type, type):
+                classname = obj_or_type.__name__
+            else:
+                classname = obj_or_type.__class__.__name__
+            self.__call_queue = queue.Queue()
             self.__thread = SerializerThread(classname, self.__call_queue)
             self.__thread.daemon = True
             self.__thread.start()
             self._thread_safe = True

         def __getattr__(self, key):
-            if key.startswith("_SerializerObjectProxy__"): # pragma: no cover
-                raise AttributeError
+            # If the attribute is a function, we want to return a
+            # proxy that will perform the call through the serializer
+            # when called.  Otherwise, we want to return the value
+            # directly.  This means we need to grab the attribute once,
+            # and therefore self.__object.__getattr__ may be called
+            # in an unsafe way, from the caller's thread.
             attr = getattr(self.__object, key)
             if not callable(attr):
+                # It's not callable, so perform the getattr from within
+                # the serializer thread, then return its value.
+                # That may differ from the "attr" value we just grabbed
+                # from here, due to forced ordering in the serializer.
                 getter = SerializerCallProxy(self.__call_queue, getattr, self)
                 return getter(self.__object, key)
-            r = SerializerCallProxy(self.__call_queue, attr, self)
-            return r
+            else:
+                # It is callable, so return an object that will proxy
+                # through the serializer when called.
+                r = SerializerCallProxy(self.__call_queue, attr, self)
+                return r

+        # For an iterable object, on __iter__(), save the object's
+        # iterator and return this proxy.  On next(), call the object's
+        # iterator through this proxy.
+        def __iter__(self):
+            attr = getattr(self.__object, "__iter__")
+            self.__iter = SerializerCallProxy(self.__call_queue, attr, self)()
+            return self
+
+        def __next__(self):
+            return SerializerCallProxy(self.__call_queue,
+                                       self.__iter.__next__, self)()
+
+        def __getitem__(self, key):
+            return self.__getattr__("__getitem__")(key)
+
         def __call__(self, *args, **kwargs):
             """Call this to instantiate the type, if a type was passed
             to serializer_proxy.  Otherwise, pass the call through."""
             ret = SerializerCallProxy(self.__call_queue,
                                       self.__object, self)(*args, **kwargs)
-            if type(self.__object) in (types.TypeType, types.ClassType):
+            if isinstance(self.__object, type):
                 # Instantiation
                 self.__object = ret
                 return self
             return ret

         def __del__(self):
-            self.__call_queue.put((None, None, None, None))
-            self.__thread.join()
+            try:
+                # Signal thread to exit, but don't wait for it.
+                self.__call_queue.put((None, None, None, None))
+            except:
+                pass

     return SerializerObjectProxy(obj_or_type)
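A minimal sketch of the proxy in use, assuming serializer_proxy above is in scope; list is just a stand-in class:

    sp = serializer_proxy(list)()  # list() is constructed on the worker thread
    sp.append(1)                   # each call is queued and serialized
    sp.append(2)
    print(sp[0])      # -> 1, via the proxied __getitem__
    print(list(sp))   # -> [1, 2], via the proxied __iter__/__next__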
nilmdb/utils/sort.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+import re
+
+
+def sort_human(items, key=None):
+    """Human-friendly sort (/stream/2 before /stream/10)"""
+    def to_num(val):
+        try:
+            return int(val)
+        except Exception:
+            return val
+
+    def human_key(text):
+        if key:
+            text = key(text)
+        # Break into character and numeric chunks.
+        chunks = re.split(r'([0-9]+)', text)
+        return [to_num(c) for c in chunks]
+
+    return sorted(items, key=human_key)
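A quick example of the new helper:

    from nilmdb.utils.sort import sort_human

    print(sort_human(["/stream/10", "/stream/2", "/stream/1"]))
    # ['/stream/1', '/stream/2', '/stream/10'] -- numeric chunks compare as ints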
@@ -1,26 +1,25 @@
-from nilmdb.utils.printf import *
 import threading
-import warnings
-import types
+from nilmdb.utils.printf import sprintf

-def verify_proxy(obj_or_type, exception = False, check_thread = True,
-                 check_concurrent = True):
+
+def verify_proxy(obj_or_type, check_thread=True,
+                 check_concurrent=True):
     """Wrap the given object or type in a VerifyObjectProxy.

     Returns a VerifyObjectProxy that proxies all method calls to the
     given object, as well as attribute retrievals.

-    When calling methods, the following checks are performed.  If
-    exception is True, an exception is raised.  Otherwise, a warning
-    is printed.
+    When calling methods, the following checks are performed.  On
+    failure, an exception is raised.

-    check_thread = True # Warn/fail if two different threads call methods.
-    check_concurrent = True # Warn/fail if two functions are concurrently
-                            # run through this proxy
+    check_thread = True  # Fail if two different threads call methods.
+    check_concurrent = True  # Fail if two functions are concurrently
+                             # run through this proxy
     """
-    class Namespace(object):
+    class Namespace():
         pass
-    class VerifyCallProxy(object):
+
+    class VerifyCallProxy():
         def __init__(self, func, parent_namespace):
             self.func = func
             self.parent_namespace = parent_namespace
@@ -42,22 +41,16 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
                           " but %s called %s.%s",
                           p.thread.name, p.classname, p.thread_callee,
                           this.name, p.classname, callee)
-            if exception:
-                raise AssertionError(err)
-            else: # pragma: no cover
-                warnings.warn(err)
+            raise AssertionError(err)

         need_concur_unlock = False
         if check_concurrent:
-            if p.concur_lock.acquire(False) == False:
+            if not p.concur_lock.acquire(False):
                 err = sprintf("unsafe concurrency: %s called %s.%s "
                               "while %s is still in %s.%s",
                               this.name, p.classname, callee,
                               p.concur_tname, p.classname, p.concur_callee)
-                if exception:
-                    raise AssertionError(err)
-                else: # pragma: no cover
-                    warnings.warn(err)
+                raise AssertionError(err)
             else:
                 p.concur_tname = this.name
                 p.concur_callee = callee
@@ -70,7 +63,7 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
             p.concur_lock.release()
             return ret

-    class VerifyObjectProxy(object):
+    class VerifyObjectProxy():
         def __init__(self, obj_or_type, *args, **kwargs):
             p = Namespace()
             self.__ns = p
@@ -80,17 +73,12 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
             p.concur_tname = None
             p.concur_callee = None
             self.__obj = obj_or_type
-            try:
-                if type(obj_or_type) in (types.TypeType, types.ClassType):
-                    p.classname = self.__obj.__name__
-                else:
-                    p.classname = self.__obj.__class__.__name__
-            except AttributeError: # pragma: no cover
-                p.classname = "???"
+            if isinstance(obj_or_type, type):
+                p.classname = self.__obj.__name__
+            else:
+                p.classname = self.__obj.__class__.__name__

         def __getattr__(self, key):
-            if key.startswith("_VerifyObjectProxy__"): # pragma: no cover
-                raise AttributeError
             attr = getattr(self.__obj, key)
             if not callable(attr):
                 return VerifyCallProxy(getattr, self.__ns)(self.__obj, key)
@@ -100,7 +88,7 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
             """Call this to instantiate the type, if a type was passed
             to verify_proxy.  Otherwise, pass the call through."""
             ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs)
-            if type(self.__obj) in (types.TypeType, types.ClassType):
+            if isinstance(self.__obj, type):
                 # Instantiation
                 self.__obj = ret
                 return self
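A usage sketch, assuming verify_proxy above is in scope; list is just a stand-in class:

    vp = verify_proxy(list)()  # instantiate through the proxy
    vp.append(1)               # OK: single thread, one call at a time
    # Calling methods on vp from a second thread, or re-entering while a
    # call is in progress, now raises AssertionError instead of warning.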
@@ -1,12 +1,95 @@
-from nilmdb.utils import datetime_tz
 import re
+import time
+import datetime_tz
+
+# Range
+min_timestamp = (-2**63)
+max_timestamp = (2**63 - 1)
+
+# Smallest representable step
+epsilon = 1
+
+
+def string_to_timestamp(string):
+    """Convert a string that represents an integer number of microseconds
+    since epoch."""
+    try:
+        # Parse a string like "1234567890123456" and return an integer
+        return int(string)
+    except ValueError:
+        # Try parsing as a float, in case it's "1234567890123456.0"
+        return int(round(float(string)))
+
+
+def timestamp_to_string(timestamp):
+    """Convert a timestamp (integer microseconds since epoch) to a string"""
+    if isinstance(timestamp, float):
+        return str(int(round(timestamp)))
+    else:
+        return str(timestamp)
+
+
+def timestamp_to_bytes(timestamp):
+    """Convert a timestamp (integer microseconds since epoch) to a Python
+    bytes object"""
+    return timestamp_to_string(timestamp).encode('utf-8')
+
+
+def timestamp_to_human(timestamp):
+    """Convert a timestamp (integer microseconds since epoch) to a
+    human-readable string, using the local timezone for display
+    (e.g. from the TZ env var)."""
+    if timestamp == min_timestamp:
+        return "(minimum)"
+    if timestamp == max_timestamp:
+        return "(maximum)"
+    dt = datetime_tz.datetime_tz.fromtimestamp(timestamp_to_unix(timestamp))
+    return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
+
+
+def unix_to_timestamp(unix):
+    """Convert a Unix timestamp (floating point seconds since epoch)
+    into a NILM timestamp (integer microseconds since epoch)"""
+    return int(round(unix * 1e6))
+
+
+def timestamp_to_unix(timestamp):
+    """Convert a NILM timestamp (integer microseconds since epoch)
+    into a Unix timestamp (floating point seconds since epoch)"""
+    return timestamp / 1e6
+
+
+seconds_to_timestamp = unix_to_timestamp
+timestamp_to_seconds = timestamp_to_unix
+
+
+def rate_to_period(hz, cycles=1):
+    """Convert a rate (in Hz) to a period (in timestamp units).
+    Returns an integer."""
+    period = unix_to_timestamp(cycles) / float(hz)
+    return int(round(period))
+

 def parse_time(toparse):
     """
-    Parse a free-form time string and return a datetime_tz object.
-    If the string doesn't contain a timestamp, the current local
-    timezone is assumed (e.g. from the TZ env var).
+    Parse a free-form time string and return a nilmdb timestamp
+    (integer microseconds since epoch).  If the string doesn't contain a
+    timestamp, the current local timezone is assumed (e.g. from the TZ
+    env var).
     """
+    if toparse == "min":
+        return min_timestamp
+    if toparse == "max":
+        return max_timestamp
+
+    # If it starts with @, treat it as a NILM timestamp
+    # (integer microseconds since epoch)
+    try:
+        if toparse[0] == '@':
+            return int(toparse[1:])
+    except (ValueError, KeyError, IndexError):
+        pass
+
     # If string isn't "now" and doesn't contain at least 4 digits,
     # consider it invalid.  smartparse might otherwise accept
     # empty strings and strings with just separators.
@@ -15,17 +98,20 @@ def parse_time(toparse):

     # Try to just parse the time as given
     try:
-        return datetime_tz.datetime_tz.smartparse(toparse)
-    except ValueError:
+        return unix_to_timestamp(datetime_tz.datetime_tz.
+                                 smartparse(toparse).totimestamp())
+    except (ValueError, OverflowError, TypeError):
         pass

-    # Try to treat it as a single double
+    # If it's parseable as a float, treat it as a Unix or NILM
+    # timestamp based on its range.
     try:
-        timestamp = float(toparse)
-        # range is from about year 2001 - 2065
-        if timestamp < 1e9 or timestamp > 3e9:
-            raise ValueError
-        return datetime_tz.datetime_tz.fromtimestamp(timestamp)
+        val = float(toparse)
+        # range is from about year 2001 - 2128
+        if 1e9 < val < 5e9:
+            return unix_to_timestamp(val)
+        if 1e15 < val < 5e15:
+            return val
     except ValueError:
         pass

@@ -47,7 +133,8 @@ def parse_time(toparse):
                    r")", toparse)
     if res is not None:
         try:
-            return datetime_tz.datetime_tz.smartparse(res.group(2))
+            return unix_to_timestamp(datetime_tz.datetime_tz.
+                                     smartparse(res.group(2)).totimestamp())
         except ValueError:
             pass

@@ -55,15 +142,7 @@ def parse_time(toparse):
     # just give up for now.
     raise ValueError("unable to parse timestamp")

-def format_time(timestamp):
-    """
-    Convert a Unix timestamp to a string for printing, using the
-    local timezone for display (e.g. from the TZ env var).
-    """
-    dt = datetime_tz.datetime_tz.fromtimestamp(timestamp)
-    return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
-
-def float_time_to_string(timestamp):
-    """Convert a floating-point Unix timestamp to a string,
-    like '1234567890.000000'"""
-    return "%.6f" % timestamp
+
+def now():
+    """Return current timestamp"""
+    return unix_to_timestamp(time.time())
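A short sketch of the new conversion helpers (nilmdb.utils.time is the module path this changeset references elsewhere):

    from nilmdb.utils.time import (unix_to_timestamp, timestamp_to_unix,
                                   rate_to_period)

    ts = unix_to_timestamp(1234567890.5)
    print(ts)                     # 1234567890500000 (integer microseconds)
    print(timestamp_to_unix(ts))  # 1234567890.5
    print(rate_to_period(8000))   # 125: microseconds between samples at 8 kHz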
@@ -5,18 +5,17 @@
 #   with nilmdb.utils.Timer("flush"):
 #       foo.flush()

-from __future__ import print_function
-from __future__ import absolute_import
 import contextlib
 import time

+
 @contextlib.contextmanager
-def Timer(name = None, tosyslog = False):
+def Timer(name=None, tosyslog=False):
     start = time.time()
     yield
     elapsed = int((time.time() - start) * 1000)
     msg = (name or 'elapsed') + ": " + str(elapsed) + " ms"
-    if tosyslog: # pragma: no cover
+    if tosyslog:
         import syslog
         syslog.syslog(msg)
     else:
@@ -1,16 +1,17 @@
 """File-like objects that add timestamps to the input lines"""

-from nilmdb.utils.printf import *
-from nilmdb.utils import datetime_tz
+from nilmdb.utils.printf import sprintf
+import nilmdb.utils.time

-class Timestamper(object):
+
+class Timestamper():
     """A file-like object that adds timestamps to lines of an input file."""
     def __init__(self, infile, ts_iter):
         """file: filename, or another file-like object
         ts_iter: iterator that returns a timestamp string for
         each line of the file"""
-        if isinstance(infile, basestring):
-            self.file = open(infile, "r")
+        if isinstance(infile, str):
+            self.file = open(infile, "rb")
         else:
             self.file = infile
         self.ts_iter = ts_iter
@@ -22,17 +23,17 @@ class Timestamper(object):
         while True:
             line = self.file.readline(*args)
             if not line:
-                return ""
-            if line[0] == '#':
+                return b""
+            if line[0:1] == b'#':
                 continue
             break
         try:
-            return self.ts_iter.next() + line
+            return next(self.ts_iter) + line
         except StopIteration:
-            return ""
+            return b""

-    def readlines(self, size = None):
-        out = ""
+    def readlines(self, size=None):
+        out = b""
         while True:
             line = self.readline()
             out += line
@@ -43,15 +44,16 @@ class Timestamper(object):
     def __iter__(self):
         return self

-    def next(self):
+    def __next__(self):
         result = self.readline()
         if not result:
             raise StopIteration
         return result

+
 class TimestamperRate(Timestamper):
     """Timestamper that uses a start time and a fixed rate"""
-    def __init__(self, infile, start, rate, end = None):
+    def __init__(self, infile, start, rate, end=None):
         """
         file: file name or object

@@ -61,31 +63,39 @@ class TimestamperRate(Timestamper):

         end: If specified, raise StopIteration before outputting a value
         greater than this."""
+        timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
+        rate_to_period = nilmdb.utils.time.rate_to_period
+
         def iterator(start, rate, end):
             n = 0
             rate = float(rate)
             while True:
-                now = start + n / rate
+                now = start + rate_to_period(rate, n)
                 if end and now >= end:
-                    raise StopIteration
-                yield sprintf("%.6f ", start + n / rate)
+                    return
+                yield timestamp_to_bytes(now) + b" "
                 n += 1
         Timestamper.__init__(self, infile, iterator(start, rate, end))
         self.start = start
         self.rate = rate

     def __str__(self):
-        start = datetime_tz.datetime_tz.fromtimestamp(self.start)
-        start = start.strftime("%a, %d %b %Y %H:%M:%S %Z")
         return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)",
-                       str(start), self.rate)
+                       nilmdb.utils.time.timestamp_to_human(self.start),
+                       self.rate)


 class TimestamperNow(Timestamper):
     """Timestamper that uses current time"""
     def __init__(self, infile):
+        timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
+        get_now = nilmdb.utils.time.now
+
         def iterator():
             while True:
-                now = datetime_tz.datetime_tz.utcnow().totimestamp()
-                yield sprintf("%.6f ", now)
+                yield timestamp_to_bytes(get_now()) + b" "
         Timestamper.__init__(self, infile, iterator())

     def __str__(self):
         return "TimestamperNow(...)"
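A usage sketch, assuming the classes above are in scope (their module path isn't shown in this hunk); timestamps follow the new integer-microsecond convention:

    import io

    raw = io.BytesIO(b"1.0 2.0\n# a comment\n3.0 4.0\n")
    ts = TimestamperRate(raw, start=1000000000000000, rate=8000)
    print(ts.readline())  # b'1000000000000000 1.0 2.0\n'
    print(ts.readline())  # comment skipped -> b'1000000000000125 3.0 4.0\n'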
requirements.txt (new file, 16 lines)
@@ -0,0 +1,16 @@
+argcomplete>=1.10.0
+CherryPy>=18.1.2
+coverage>=4.5.4
+cython>=0.29.13
+decorator>=4.4.0
+fallocate>=1.6.4
+flake8>=3.7.8
+nose>=1.3.7
+numpy>=1.17.0
+progressbar>=2.5
+psutil>=5.6.3
+python-datetime-tz>=0.5.4
+python-dateutil>=2.8.0
+requests>=2.22.0
+tz>=0.2.2
+WebTest>=2.0.33
19
setup.cfg
19
setup.cfg
@@ -13,8 +13,6 @@ cover-package=nilmdb
 cover-erase=1
 # this works, puts html output in cover/ dir:
 # cover-html=1
-# need nose 1.1.3 for this:
-# cover-branches=1
 #debug=nose
 #debug-log=nose.log
 stop=1
@@ -39,3 +37,20 @@ tests=tests
 #with-profile=1
 #profile-sort=time
 ##profile-restrict=10 # doesn't work right, treated as string or something
+
+[versioneer]
+VCS=git
+style=pep440
+versionfile_source=nilmdb/_version.py
+versionfile_build=nilmdb/_version.py
+tag_prefix=nilmdb-
+parentdir_prefix=nilmdb-
+
+[flake8]
+exclude=_version.py,fsck.py,nilmdb_fsck.py
+extend-ignore=E731
+per-file-ignores=__init__.py:F401,E402 serializer.py:E722 mustclose.py:E722
+
+[pylint]
+ignore=_version.py,fsck.py,nilmdb_fsck.py
+disable=C0103,C0111,R0913,R0914
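The new [versioneer] stanza carries the configuration that nilmdb previously assigned on the versioneer module inside setup.py (those assignments are removed in the next file). With the settings living in setup.cfg, a setup.py typically only needs the two standard versioneer hooks; a sketch, assuming a versioneer release new enough to read setup.cfg:

    import versioneer
    from setuptools import setup

    setup(
        name="nilmdb",
        version=versioneer.get_version(),    # derived from git tags ("nilmdb-" prefix stripped)
        cmdclass=versioneer.get_cmdclass(),  # adds the versioneer setup.py commands
        # ...
    )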
setup.py (108 changed lines)
@@ -6,92 +6,31 @@
 # Then just package it up:
 # python setup.py sdist

-# This is supposed to be using Distribute:
-#
-# distutils provides a "setup" method.
-# setuptools is a set of monkeypatches on top of that.
-# distribute is a particular version/implementation of setuptools.
-#
-# So we don't really know if this is using the old setuptools or the
-# Distribute-provided version of setuptools.

-import traceback
 import sys
 import os
+from setuptools import setup
+from distutils.extension import Extension
-try:
-    from setuptools import setup, find_packages
-    from distutils.extension import Extension
-    import distutils.version
-except ImportError:
-    traceback.print_exc()
-    print "Please install the prerequisites listed in README.txt"
-    sys.exit(1)

 # Versioneer manages version numbers from git tags.
 # https://github.com/warner/python-versioneer
 import versioneer
-versioneer.versionfile_source = 'nilmdb/_version.py'
-versioneer.versionfile_build = 'nilmdb/_version.py'
-versioneer.tag_prefix = 'nilmdb-'
-versioneer.parentdir_prefix = 'nilmdb-'

-# Hack to workaround logging/multiprocessing issue:
-# https://groups.google.com/d/msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
-try: import multiprocessing
-except: pass

-# Use Cython if it's new enough, otherwise use preexisting C files.
-cython_modules = [ 'nilmdb.server.interval',
-                   'nilmdb.server.layout',
-                   'nilmdb.server.rbtree' ]
-try:
-    import Cython
-    from Cython.Build import cythonize
-    if (distutils.version.LooseVersion(Cython.__version__) <
-        distutils.version.LooseVersion("0.16")):
-        print "Cython version", Cython.__version__, "is too old; not using it."
-        raise ImportError()
-    use_cython = True
-except ImportError:
-    use_cython = False

+# External modules that need to be built
 ext_modules = [ Extension('nilmdb.server.rocket', ['nilmdb/server/rocket.c' ]) ]

+# Use Cython.
+cython_modules = [ 'nilmdb.server.interval', 'nilmdb.server.rbtree' ]
+import Cython
+from Cython.Build import cythonize
 for modulename in cython_modules:
     filename = modulename.replace('.','/')
-    if use_cython:
-        ext_modules.extend(cythonize(filename + ".pyx"))
-    else:
-        cfile = filename + ".c"
-        if not os.path.exists(cfile):
-            raise Exception("Missing source file " + cfile + ". "
-                            "Try installing cython >= 0.16.")
-        ext_modules.append(Extension(modulename, [ cfile ]))
+    ext_modules.extend(cythonize(filename + ".pyx"))

-# We need a MANIFEST.in. Generate it here rather than polluting the
-# repository with yet another setup-related file.
-with open("MANIFEST.in", "w") as m:
-    m.write("""
-# Root
-include README.txt
-include setup.cfg
-include setup.py
-include versioneer.py
-include Makefile
-include .coveragerc
-include .pylintrc
-
-# Cython files -- include source.
-recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
-
-# Tests
-recursive-include tests *.py
-recursive-include tests/data *
-include tests/test.order
-
-# Docs
-recursive-include docs Makefile *.md
-""")
+# Get list of requirements to use in `install_requires` below. Note
+# that we don't make a distinction between things that are actually
+# required for end-users vs developers (or use `test_requires` or
+# anything else) -- just install everything for simplicity.
+install_requires = open('requirements.txt').readlines()

 # Run setup
 setup(name='nilmdb',
@@ -103,32 +42,21 @@ setup(name='nilmdb',
       long_description = "NILM Database",
       license = "Proprietary",
       author_email = 'jim@jtan.com',
-      tests_require = [ 'nose',
-                        'coverage',
-                      ],
-      setup_requires = [ 'distribute',
-                       ],
-      install_requires = [ 'decorator',
-                           'cherrypy >= 3.2',
-                           'simplejson',
-                           'pycurl',
-                           'python-dateutil',
-                           'pytz',
-                           'psutil >= 0.3.0',
-                           'requests >= 1.1.0, < 2.0.0',
-                         ],
+      setup_requires = [ 'setuptools' ],
+      install_requires = install_requires,
       packages = [ 'nilmdb',
                    'nilmdb.utils',
-                   'nilmdb.utils.datetime_tz',
                    'nilmdb.server',
                    'nilmdb.client',
                    'nilmdb.cmdline',
                    'nilmdb.scripts',
+                   'nilmdb.fsck',
                  ],
       entry_points = {
           'console_scripts': [
               'nilmtool = nilmdb.scripts.nilmtool:main',
               'nilmdb-server = nilmdb.scripts.nilmdb_server:main',
+              'nilmdb-fsck = nilmdb.scripts.nilmdb_fsck:main',
           ],
       },
       ext_modules = ext_modules,
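One nuance in the new install_requires handling: readlines() keeps the trailing newlines and would also pass blank or comment lines straight through. setuptools tolerates the newlines, but a slightly more defensive variant (a sketch, not what this diff does) is:

    with open("requirements.txt") as f:
        install_requires = [
            line.strip() for line in f
            if line.strip() and not line.startswith("#")
        ]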
@@ -2,123 +2,123 @@
 # layout: float32_8
 # start: Fri, 23 Mar 2012 10:00:30.000000 +0000
 # end: Fri, 23 Mar 2012 10:00:31.000000 +0000
-1332496830.000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
+1332496830000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
-1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
+1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
[... the remaining 118 rows of this hunk, through 1332496830.991667, change the same way: the first column switches from fractional seconds to an integer microsecond count; the eight float32 data columns are unchanged ...]
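The regenerated test data reflects the storage change visible throughout this comparison: nilmdb 2.x timestamps are integer microsecond counts rather than fractional seconds. Converting the old textual representation exactly requires decimal arithmetic, since a value like 0.008333 is not exactly representable in binary floating point; a sketch:

    from decimal import Decimal

    def seconds_str_to_micros(ts: str) -> int:
        """Convert '1332496830.008333' to 1332496830008333 exactly."""
        return int(Decimal(ts) * 10**6)

    assert seconds_str_to_micros("1332496830.008333") == 1332496830008333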
@@ -1,119 +1,119 @@
-1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
+1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
[... the remaining rows of this hunk change identically: fractional-second timestamps become integer microsecond counts; the data columns are unchanged ...]
|
1332496830758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
|
||||||
1332496830.766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
|
1332496830766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
|
||||||
1332496830.775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
|
1332496830775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
|
||||||
1332496830.783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
|
1332496830783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
|
||||||
1332496830.791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
|
1332496830791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
|
||||||
1332496830.800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
|
1332496830800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
|
||||||
1332496830.808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
|
1332496830808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
|
||||||
1332496830.816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
|
1332496830816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
|
||||||
1332496830.825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
|
1332496830825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
|
||||||
1332496830.833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
|
1332496830833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
|
||||||
1332496830.841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
|
1332496830841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
|
||||||
1332496830.850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
|
1332496830850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
|
||||||
1332496830.858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
|
1332496830858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
|
||||||
1332496830.866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
|
1332496830866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
|
||||||
1332496830.875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
|
1332496830875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
|
||||||
1332496830.883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
|
1332496830883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
|
||||||
1332496830.891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
|
1332496830891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
|
||||||
1332496830.900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
|
1332496830900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
|
||||||
1332496830.908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
|
1332496830908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
|
||||||
1332496830.916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
|
1332496830916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
|
||||||
1332496830.925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
|
1332496830925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
|
||||||
1332496830.933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
|
1332496830933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
|
||||||
1332496830.941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
|
1332496830941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
|
||||||
1332496830.950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
|
1332496830950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
|
||||||
1332496830.958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
|
1332496830958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
|
||||||
1332496830.966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
|
1332496830966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
|
||||||
1332496830.975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
|
1332496830975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
|
||||||
1332496830.983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
|
1332496830983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
|
||||||
1332496830.991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03
|
1332496830991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03
|
||||||
|
|||||||
@@ -1 +1 @@
-1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
+1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
@@ -1,2 +1,2 @@
-1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
+1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
-1332496830.016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
+1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
@@ -1,124 +1,124 @@
 # path: /newton/prep
 # layout: float32_8
-# start: 1332496830.000000
+# start: 1332496830000000
-# end: 1332496830.999000
+# end: 1332496830999000
-1332496830.000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
+1332496830000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
[the remaining 119 rows of this hunk change identically: fractional-second timestamps (1332496830.008333 ... 1332496830.991667) become integer microseconds (1332496830008333 ... 1332496830991667); all eight data columns are unchanged]
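Every fixture hunk above makes the same mechanical change: NilmDB 2.x stores timestamps as integer microseconds, so the fractional-second value in column 1 is rewritten while the eight data columns are untouched. A minimal conversion sketch (a hypothetical helper for illustration, not code from this repository; round-tripping through float can be off by 1 µs for very long timestamps):

    def seconds_line_to_microseconds(line):
        # Rewrite "1332496830.008333 ..." as "1332496830008333 ...".
        if line.startswith("#") or not line.strip():
            return line  # header comments (# start:, # end:) are converted separately
        fields = line.split()
        us = round(float(fields[0]) * 1e6)  # seconds -> integer microseconds
        return " ".join(["%d" % us] + fields[1:]) + "\n"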
28	tests/data/extract-8	Normal file
@@ -0,0 +1,28 @@
+# interval-start 1332496919900000
+1332496919900000 2.523050e+05 2.254020e+05 4.779410e+03 3.638030e+03 8.138070e+03 4.334460e+03 1.083780e+03 3.743730e+03
+1332496919908333 2.551190e+05 2.237870e+05 5.965640e+03 2.076350e+03 9.468790e+03 3.693880e+03 1.247860e+03 3.393680e+03
+1332496919916667 2.616370e+05 2.247980e+05 4.848970e+03 2.315620e+03 9.323300e+03 4.225460e+03 1.805780e+03 2.593050e+03
+1332496919925000 2.606460e+05 2.251300e+05 3.061360e+03 3.951840e+03 7.662910e+03 5.341410e+03 1.986520e+03 2.276780e+03
+1332496919933333 2.559710e+05 2.235030e+05 4.096030e+03 3.296970e+03 7.827080e+03 5.452120e+03 2.492520e+03 2.929450e+03
+1332496919941667 2.579260e+05 2.217080e+05 5.472320e+03 1.555700e+03 8.495760e+03 4.491140e+03 2.379780e+03 3.741710e+03
+1332496919950000 2.610180e+05 2.242350e+05 4.669770e+03 1.876190e+03 8.366680e+03 3.677510e+03 9.021690e+02 3.549040e+03
+1332496919958333 2.569150e+05 2.274650e+05 2.785070e+03 3.751930e+03 7.440320e+03 3.964860e+03 -3.227860e+02 2.460890e+03
+1332496919966667 2.509510e+05 2.262000e+05 3.772710e+03 3.131950e+03 8.159860e+03 4.539860e+03 7.375190e+02 2.126750e+03
+1332496919975000 2.556710e+05 2.223720e+05 5.826200e+03 8.715560e+02 9.120240e+03 4.545110e+03 2.804310e+03 2.721000e+03
+1332496919983333 2.649730e+05 2.214860e+05 5.839130e+03 4.659180e+02 8.628300e+03 3.934870e+03 2.972490e+03 3.773730e+03
+1332496919991667 2.652170e+05 2.233920e+05 3.718770e+03 2.834970e+03 7.209900e+03 3.460260e+03 1.324930e+03 4.075960e+03
+# interval-end 1332496919991668
+# interval-start 1332496920000000
+1332496920000000 2.564370e+05 2.244300e+05 4.011610e+03 3.475340e+03 7.495890e+03 3.388940e+03 2.613970e+02 3.731260e+03
+1332496920008333 2.539630e+05 2.241670e+05 5.621070e+03 1.548010e+03 9.165170e+03 3.522930e+03 1.058930e+03 2.996960e+03
+1332496920016667 2.585080e+05 2.249300e+05 6.011400e+03 8.188660e+02 9.039950e+03 4.482440e+03 2.490390e+03 2.679340e+03
+1332496920025000 2.596270e+05 2.260220e+05 4.474500e+03 2.423020e+03 7.414190e+03 5.071970e+03 2.439380e+03 2.962960e+03
+1332496920033333 2.551870e+05 2.246320e+05 4.738570e+03 3.398040e+03 7.395120e+03 4.726450e+03 1.839030e+03 3.393530e+03
+1332496920041667 2.571020e+05 2.216230e+05 6.144130e+03 1.441090e+03 8.756480e+03 3.495320e+03 1.869940e+03 3.752530e+03
+1332496920050000 2.636530e+05 2.217700e+05 6.221770e+03 7.389620e+02 9.547600e+03 2.666820e+03 1.462660e+03 3.332570e+03
+1332496920058333 2.636130e+05 2.252560e+05 4.477120e+03 2.437450e+03 8.510210e+03 3.855630e+03 9.594420e+02 2.387180e+03
+1332496920066667 2.553500e+05 2.262640e+05 4.283720e+03 3.923940e+03 7.912470e+03 5.466520e+03 1.284990e+03 2.093720e+03
+1332496920075000 2.527270e+05 2.246090e+05 5.851930e+03 2.491980e+03 8.540630e+03 5.623050e+03 2.339780e+03 3.007140e+03
+1332496920083333 2.584750e+05 2.235780e+05 5.924870e+03 1.394480e+03 8.779620e+03 4.544180e+03 2.132030e+03 3.849760e+03
+1332496920091667 2.615630e+05 2.246090e+05 4.336140e+03 2.455750e+03 8.055380e+03 3.469110e+03 6.278730e+02 3.664200e+03
+# interval-end 1332496920100000
@@ -1,4 +1,4 @@
-# comments are cool?
+# comments are cool? what if they contain →UNICODEâ†<C3A2> or invalid utf-8 like Ã(
 2.66568e+05 2.24029e+05 5.16140e+03 2.52517e+03 8.35084e+03 3.72470e+03 1.35534e+03 2.03900e+03
 2.57914e+05 2.27183e+05 4.30368e+03 4.13080e+03 7.25535e+03 4.89047e+03 1.63859e+03 1.93496e+03
 2.51717e+05 2.26047e+05 5.99445e+03 3.49363e+03 8.07250e+03 5.08267e+03 2.26917e+03 2.86231e+03
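The changed comment line above deliberately contains invalid UTF-8 (the mojibake is the fixture's own content, not corruption introduced here); it exercises how nilmdb handles comments that are not valid UTF-8. One standard way to display such bytes without raising, shown as a sketch (errors="replace" is a stock codecs option, not necessarily what nilmdb itself uses):

    def display_comment(raw: bytes) -> str:
        # Invalid byte sequences become U+FFFD instead of raising UnicodeDecodeError.
        return raw.decode("utf-8", errors="replace")

    # b"\xc3(" is a truncated two-byte sequence, like the fixture's trailing bytes.
    print(display_comment(b"# comments are cool? \xe2\x86\x92 or invalid utf-8 like \xc3("))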
@@ -1,11 +1,11 @@
-1332497040.000000 2.56439e+05 2.24775e+05 2.92897e+03 4.66646e+03 7.58491e+03 3.57351e+03 -4.34171e+02 2.98819e+03
+1332497040000000 2.56439e+05 2.24775e+05 2.92897e+03 4.66646e+03 7.58491e+03 3.57351e+03 -4.34171e+02 2.98819e+03
-1332497040.010000 2.51903e+05 2.23202e+05 4.23696e+03 3.49363e+03 8.53493e+03 4.29416e+03 8.49573e+02 2.38189e+03
+1332497040010000 2.51903e+05 2.23202e+05 4.23696e+03 3.49363e+03 8.53493e+03 4.29416e+03 8.49573e+02 2.38189e+03
-1332497040.020000 2.57625e+05 2.20247e+05 5.47017e+03 1.35872e+03 9.18903e+03 4.56136e+03 2.65599e+03 2.60912e+03
+1332497040020000 2.57625e+05 2.20247e+05 5.47017e+03 1.35872e+03 9.18903e+03 4.56136e+03 2.65599e+03 2.60912e+03
-1332497040.030000 2.63375e+05 2.20706e+05 4.51842e+03 1.80758e+03 8.17208e+03 4.17463e+03 2.57884e+03 3.32848e+03
+1332497040030000 2.63375e+05 2.20706e+05 4.51842e+03 1.80758e+03 8.17208e+03 4.17463e+03 2.57884e+03 3.32848e+03
-1332497040.040000 2.59221e+05 2.22346e+05 2.98879e+03 3.66264e+03 6.87274e+03 3.94223e+03 1.25928e+03 3.51786e+03
+1332497040040000 2.59221e+05 2.22346e+05 2.98879e+03 3.66264e+03 6.87274e+03 3.94223e+03 1.25928e+03 3.51786e+03
-1332497040.050000 2.51918e+05 2.22281e+05 4.22677e+03 2.84764e+03 7.78323e+03 3.81659e+03 8.04944e+02 3.46314e+03
+1332497040050000 2.51918e+05 2.22281e+05 4.22677e+03 2.84764e+03 7.78323e+03 3.81659e+03 8.04944e+02 3.46314e+03
-1332497040.050000 2.54478e+05 2.21701e+05 5.61366e+03 1.02262e+03 9.26581e+03 3.50152e+03 1.29331e+03 3.07271e+03
+1332497040050000 2.54478e+05 2.21701e+05 5.61366e+03 1.02262e+03 9.26581e+03 3.50152e+03 1.29331e+03 3.07271e+03
-1332497040.060000 2.59568e+05 2.22945e+05 4.97190e+03 1.28250e+03 8.62081e+03 4.06316e+03 1.85717e+03 2.61990e+03
+1332497040060000 2.59568e+05 2.22945e+05 4.97190e+03 1.28250e+03 8.62081e+03 4.06316e+03 1.85717e+03 2.61990e+03
-1332497040.070000 2.57269e+05 2.23697e+05 3.60527e+03 3.05749e+03 7.22363e+03 4.90330e+03 1.93736e+03 2.35357e+03
+1332497040070000 2.57269e+05 2.23697e+05 3.60527e+03 3.05749e+03 7.22363e+03 4.90330e+03 1.93736e+03 2.35357e+03
-1332497040.080000 2.52274e+05 2.21438e+05 5.01228e+03 2.86309e+03 7.87115e+03 4.80448e+03 2.18291e+03 2.93397e+03
+1332497040080000 2.52274e+05 2.21438e+05 5.01228e+03 2.86309e+03 7.87115e+03 4.80448e+03 2.18291e+03 2.93397e+03
-1332497040.090000 2.56468e+05 2.19205e+05 6.29804e+03 8.09467e+02 9.12895e+03 3.52055e+03 2.16980e+03 3.88739e+03
+1332497040090000 2.56468e+05 2.19205e+05 6.29804e+03 8.09467e+02 9.12895e+03 3.52055e+03 2.16980e+03 3.88739e+03
File diff suppressed because it is too large
8	tests/data/timestamped	Normal file
@@ -0,0 +1,8 @@
+-10000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
+-100000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
+-100000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
+-1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
+1 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
+1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
+1000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
+1000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
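The timestamped fixture repeats one data row under timestamps spanning several orders of magnitude, negative and positive, to exercise parsing and printing at the boundaries. Rendering integer microseconds back as seconds needs care with negative values; a sketch of such a formatter (illustrative only, not nilmdb's own implementation):

    def us_to_seconds_str(us: int) -> str:
        # Split off the sign first so -1000 prints as "-0.001000";
        # naive floor division would print the misleading "-1.999000".
        sign, mag = ("-", -us) if us < 0 else ("", us)
        return "%s%d.%06d" % (sign, mag // 1000000, mag % 1000000)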
@@ -24,7 +24,7 @@ class JimOrderPlugin(nose.plugins.Plugin):
                                 name, workingDir=loader.workingDir)
         try:
             order = os.path.join(addr.filename, "test.order")
-        except:
+        except Exception:
             order = None
         if order and os.path.exists(order):
             files = []
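The one functional change in this hunk narrows a bare except: to except Exception:. The difference matters because SystemExit and KeyboardInterrupt derive from BaseException rather than Exception, so the narrowed handler no longer swallows interpreter shutdown or Ctrl-C. A quick self-contained illustration:

    import sys

    try:
        sys.exit(0)
    except Exception:
        print("not reached: SystemExit is not an Exception subclass")
    except BaseException:
        print("caught SystemExit, as a bare 'except:' would have too")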
@@ -2,18 +2,19 @@ test_printf.py
 test_threadsafety.py
 test_lrucache.py
 test_mustclose.py
+test_misc.py

 test_serializer.py
-test_iteratorizer.py

 test_timestamper.py
-test_layout.py
 test_rbtree.py
 test_interval.py

 test_bulkdata.py
 test_nilmdb.py
+test_wsgi.py
 test_client.py
+test_numpyclient.py
 test_cmdline.py

 test_*.py
@@ -5,10 +5,12 @@ from nilmdb.utils.printf import *
 from nose.tools import *
 from nose.tools import assert_raises
 import itertools
+import errno
+import pickle

 from testutil.helpers import *

-testdb = "tests/bulkdata-testdb"
+testdb = b"tests/bulkdata-testdb"

 import nilmdb.server.bulkdata
 from nilmdb.server.bulkdata import BulkData
@@ -16,19 +18,31 @@ from nilmdb.server.bulkdata import BulkData
 class TestBulkData(object):

     def test_bulkdata(self):
-        for (size, files, db) in [ ( 0, 0, testdb ),
+        for (size, files, db) in [ ( None, None, testdb ),
                                    ( 25, 1000, testdb ),
                                    ( 1000, 3, testdb.decode("utf-8") ) ]:
             recursive_unlink(db)
             os.mkdir(db)
             self.do_basic(db, size, files)

+    def test_corruption(self):
+        db = testdb
+        recursive_unlink(db)
+        os.mkdir(db)
+
+        # Remove lock before close
+        data = BulkData(db)
+        os.unlink(data.lock)
+        data.close()
+
     def do_basic(self, db, size, files):
         """Do the basic test with variable file_size and files_per_dir"""
-        if not size or not files:
-            data = BulkData(db)
-        else:
-            data = BulkData(db, file_size = size, files_per_dir = files)
+        data = BulkData(db, file_size = size, files_per_dir = files)
+
+        # Try opening it again (should result in locking error)
+        with assert_raises(IOError) as e:
+            data2 = BulkData(db)
+        in_("already locked by another process", str(e.exception))

         # create empty
         with assert_raises(ValueError):
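The new test_corruption case and the reopen check added to do_basic pin down the 2.x locking behavior: a BulkData directory is guarded by a lock file (data.lock above), a second open raises IOError containing "already locked by another process", and the lock file can be unlinked out from under a live instance before close(). The diff does not show how the lock is implemented; a conventional POSIX advisory-lock sketch, offered only as an assumption of the likely mechanism:

    import fcntl, os

    def acquire_lock(lockfile):
        # Exclusive, non-blocking flock: a second opener fails immediately
        # with EWOULDBLOCK (BlockingIOError) instead of waiting.
        fd = os.open(lockfile, os.O_CREAT | os.O_RDWR, 0o644)
        try:
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except BlockingIOError:
            os.close(fd)
            raise IOError("database already locked by another process")
        return fd  # keep the fd open; closing it releases the lock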
@@ -36,12 +50,19 @@ class TestBulkData(object):
         with assert_raises(ValueError):
             data.create("foo/bar", "uint16_8")
         data.create("/foo/bar", "uint16_8")
-        data.create(u"/foo/baz/quux", "float64_16")
+        data.create("/foo/baz/quux", "float64_16")
-        with assert_raises(ValueError):
+        with assert_raises(ValueError) as e:
             data.create("/foo/bar/baz", "uint16_8")
+        in_("path is subdir of existing node", str(e.exception))
         with assert_raises(ValueError):
             data.create("/foo/baz", "float64_16")

+        # filename too long (tests error paths in _create_parents)
+        with assert_raises(OSError) as e:
+            data.create("/test/long/" + "a"*10000 + "/foo", "int32_1")
+        eq_(e.exception.errno, errno.ENAMETOOLONG)
+
+
         # get node -- see if caching works
         nodes = []
         for i in range(5000):
@@ -49,31 +70,40 @@ class TestBulkData(object):
             nodes.append(data.getnode("/foo/baz/quux"))
         del nodes

+        def get_node_slice(key):
+            if isinstance(key, slice):
+                return [ node.get_data(x, x+1) for x in
+                         range(*key.indices(node.nrows)) ]
+            return node.get_data(key, key+1)
+
         # Test node
         node = data.getnode("/foo/bar")
         with assert_raises(IndexError):
-            x = node[0]
+            x = get_node_slice(0)
+        with assert_raises(IndexError):
+            x = node[0] # timestamp
         raw = []
         for i in range(1000):
-            raw.append([10000+i, 1, 2, 3, 4, 5, 6, 7, 8 ])
-        node.append(raw[0:1])
-        node.append(raw[1:100])
-        node.append(raw[100:])
+            raw.append(b"%d 1 2 3 4 5 6 7 8\n" % (10000 + i))
+        node.append_data(b"".join(raw[0:1]), 0, 50000)
+        node.append_data(b"".join(raw[1:100]), 0, 50000)
+        node.append_data(b"".join(raw[100:]), 0, 50000)

         misc_slices = [ 0, 100, slice(None), slice(0), slice(10),
                         slice(5,10), slice(3,None), slice(3,-3),
                         slice(20,10), slice(200,100,-1), slice(None,0,-1),
                         slice(100,500,5) ]

         # Extract slices
         for s in misc_slices:
-            eq_(node[s], raw[s])
+            eq_(get_node_slice(s), raw[s])

         # Extract misc slices while appending, to make sure the
         # data isn't being added in the middle of the file
         for s in [2, slice(1,5), 2, slice(1,5)]:
-            node.append([[0,0,0,0,0,0,0,0,0]])
-            raw.append([0,0,0,0,0,0,0,0,0])
-            eq_(node[s], raw[s])
+            node.append_data(b"0 0 0 0 0 0 0 0 0\n", 0, 50000)
+            raw.append(b"0 0 0 0 0 0 0 0 0\n")
+            eq_(get_node_slice(s), raw[s])

         # Get some coverage of remove; remove is more fully tested
         # in cmdline
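The get_node_slice() helper added above maps Python slice objects onto the new get_data(start, end) call via slice.indices(), the stdlib method that clamps a slice to a given length and resolves negative indices. A quick standalone illustration of what it returns:

    s = slice(200, 100, -1)
    print(s.indices(1000))                     # (200, 100, -1)
    print(list(range(*s.indices(1000)))[:3])   # [200, 199, 198]
    print(slice(3, -3).indices(10))            # (3, 7, 1) -- negative index resolved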
@@ -81,17 +111,40 @@ class TestBulkData(object):
         node.remove(9999,9998)

         # close, reopen
-        # reopen
         data.close()
-        if not size or not files:
-            data = BulkData(db)
-        else:
-            data = BulkData(db, file_size = size, files_per_dir = files)
+        data = BulkData(db, file_size = size, files_per_dir = files)
         node = data.getnode("/foo/bar")

+        # make an empty dir that will get ignored by _get_nrows
+        data.close()
+        os.mkdir(os.path.join(testdb, b"data/foo/bar/0123"))
+        data = BulkData(db, file_size = size, files_per_dir = files)
+        node = data.getnode("/foo/bar")
+
+        # make a corrupted file that's the wrong size
+        data.close()
+        with open(os.path.join(testdb, b"data/foo/bar/0123/0123"), "wb") as f:
+            f.write(b"x"*17)
+        data = BulkData(db, file_size = size, files_per_dir = files)
+        with assert_raises(ValueError) as e:
+            node = data.getnode("/foo/bar")
+        in_("file offset is not a multiple of data size", str(e.exception))
+
+        # mess with format
+        data.close()
+        with open(os.path.join(testdb, b"data/foo/bar/_format"), "rb") as f:
+            fmt = pickle.load(f)
+        fmt["version"] = 2
+        with open(os.path.join(testdb, b"data/foo/bar/_format"), "wb") as f:
+            pickle.dump(fmt, f, 2)
+        data = BulkData(db, file_size = size, files_per_dir = files)
+        with assert_raises(NotImplementedError) as e:
+            node = data.getnode("/foo/bar")
+        in_("old version 2 bulk data store is not supported", str(e.exception))
+
         # Extract slices
         for s in misc_slices:
-            eq_(node[s], raw[s])
+            eq_(get_node_slice(s), raw[s])

         # destroy
         with assert_raises(ValueError):
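The corrupted-file case above works because each bulkdata file holds fixed-size packed rows, so a valid file length must be an exact multiple of the row size; 17 bytes of junk can never be. A sketch of that check, assuming struct-style packing (the format string below is illustrative, not nilmdb's):

    import os, struct

    def check_file(path, packer=struct.Struct("<qhhhhhhhh")):
        # int64 timestamp + 8 x int16 = 24 bytes per row (illustrative layout)
        size = os.stat(path).st_size
        if size % packer.size != 0:
            raise ValueError("file offset is not a multiple of data size")
        return size // packer.size   # number of complete rows

The remaining hunks move on from the BulkData layer to the client test suite (class TestClient), starting with its Python 3 import cleanups.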
@@ -5,8 +5,9 @@ import nilmdb.client

 from nilmdb.utils.printf import *
 from nilmdb.utils import timestamper
-from nilmdb.client import ClientError, ServerError
-from nilmdb.utils import datetime_tz
+from nilmdb.client import ClientError, ServerError, Error
+from nilmdb.utils.sort import sort_human
+import datetime_tz

 from nose.plugins.skip import SkipTest
 from nose.tools import *

@@ -16,13 +17,14 @@ import distutils.version
 import os
 import sys
 import threading
-import cStringIO
-import simplejson as json
+import io
+import json
 import unittest
 import warnings
 import resource
 import time
 import re
+import struct

 from testutil.helpers import *
@@ -77,6 +79,32 @@ class TestClient(object):
         # Bad URLs should give 404, not 500
         with assert_raises(ClientError):
             client.http.get("/stream/create")
+
+        # Test error handling
+        url = testurl
+        args = { "url": url,
+                 "status": "400",
+                 "message": "Something went wrong",
+                 "traceback": None }
+        with assert_raises(ClientError):
+            client.http._handle_error(url, 400, json.dumps(args))
+        with assert_raises(ClientError):
+            client.http._handle_error(url, 400, "this is not JSON.. {")
+        args["status"] = "500"
+        with assert_raises(ServerError):
+            client.http._handle_error(url, 500, json.dumps(args))
+        args["message"] = None
+        with assert_raises(ServerError):
+            client.http._handle_error(url, 500, json.dumps(args))
+        args["status"] = "600"
+        with assert_raises(Error):
+            client.http._handle_error(url, 600, json.dumps(args))
+
+        # Use get_gen for an endpoint that doesn't have newlines,
+        # for better test coverage.
+        for line in client.http.get_gen("/version"):
+            pass
+
         client.close()

     def test_client_02_createlist(self):
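The _handle_error tests above pin down a three-way mapping that any HTTP client wrapper could use: 4xx becomes ClientError, 5xx becomes ServerError, and anything else falls back to the base Error. A minimal sketch of that dispatch under those assumptions — the exception classes are stubbed here, while nilmdb defines its own in nilmdb.client:

    import json

    class Error(Exception): pass
    class ClientError(Error): pass   # 4xx responses
    class ServerError(Error): pass   # 5xx responses

    def handle_error(url, code, body):
        # the body may or may not be valid JSON; decode it best-effort
        try:
            message = json.loads(body).get("message") or "(no message)"
        except ValueError:
            message = body
        if 400 <= code <= 499:
            raise ClientError("%d at %s: %s" % (code, url, message))
        if 500 <= code <= 599:
            raise ServerError("%d at %s: %s" % (code, url, message))
        raise Error("%d at %s: %s" % (code, url, message))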
@@ -97,23 +125,29 @@ class TestClient(object):

         # Bad method types
         with assert_raises(ClientError):
-            client.http.put("/stream/list","")
+            client.http.put("/stream/list",b"")
         # Try a bunch of times to make sure the request body is getting consumed
         for x in range(10):
             with assert_raises(ClientError):
                 client.http.post("/stream/list")
         client = nilmdb.client.Client(url = testurl)

-        # Create three streams
+        # Create four streams
         client.stream_create("/newton/prep", "float32_8")
         client.stream_create("/newton/raw", "uint16_6")
-        client.stream_create("/newton/zzz/rawnotch", "uint16_9")
+        client.stream_create("/newton/zzz/rawnotch2", "uint16_9")
+        client.stream_create("/newton/zzz/rawnotch11", "uint16_9")

-        # Verify we got 3 streams
+        # Test sort_human (used by stream_list)
+        eq_(sort_human(["/s/10", "/s/2"]), ["/s/2", "/s/10"])
+
+        # Verify we got 4 streams in the right order
         eq_(client.stream_list(), [ ["/newton/prep", "float32_8"],
                                     ["/newton/raw", "uint16_6"],
-                                    ["/newton/zzz/rawnotch", "uint16_9"]
+                                    ["/newton/zzz/rawnotch2", "uint16_9"],
+                                    ["/newton/zzz/rawnotch11", "uint16_9"]
                                     ])

         # Match just one type or one path
         eq_(client.stream_list(layout="uint16_6"),
             [ ["/newton/raw", "uint16_6"] ])
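sort_human's contract is visible in the assertion above: embedded numbers compare numerically, so "/s/2" sorts before "/s/10" (plain string sorting would reverse them, which is why rawnotch2 precedes rawnotch11). A common way to get that behavior — a sketch only; nilmdb.utils.sort may differ:

    import re

    def sort_human(items):
        def key(s):
            # split into digit and non-digit runs; compare digit runs as ints
            # (simplified: assumes keys have matching digit/text structure)
            return [int(p) if p.isdigit() else p
                    for p in re.split(r"([0-9]+)", s)]
        return sorted(items, key=key)

    print(sort_human(["/s/10", "/s/2"]))  # ['/s/2', '/s/10']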
@@ -126,8 +160,19 @@ class TestClient(object):
         # we create a table.
         limit = resource.getrlimit(resource.RLIMIT_FSIZE)
         resource.setrlimit(resource.RLIMIT_FSIZE, (1, limit[1]))
+
+        # normal
         with assert_raises(ServerError) as e:
             client.stream_create("/newton/hello", "uint16_6")
+
+        # same but with force_traceback == False, to improve coverage
+        global test_server
+        test_server.force_traceback = False
+        with assert_raises(ServerError) as e:
+            client.stream_create("/newton/world", "uint16_6")
+        test_server.force_traceback = True
+
+        # Reset resource limit
         resource.setrlimit(resource.RLIMIT_FSIZE, limit)

         client.close()
@@ -186,15 +231,14 @@ class TestClient(object):
         datetime_tz.localtz_set("America/New_York")

         testfile = "tests/data/prep-20120323T1000"
-        start = datetime_tz.datetime_tz.smartparse("20120323T1000")
-        start = start.totimestamp()
+        start = nilmdb.utils.time.parse_time("20120323T1000")
         rate = 120

         # First try a nonexistent path
         data = timestamper.TimestamperRate(testfile, start, 120)
         with assert_raises(ClientError) as e:
             result = client.stream_insert("/newton/no-such-path", data)
-        in_("404 Not Found", str(e.exception))
+        in_("404 Not Found", repr(e.exception))

         # Now try reversed timestamps
         data = timestamper.TimestamperRate(testfile, start, 120)

@@ -206,27 +250,27 @@ class TestClient(object):
             "start must precede end", str(e.exception))

         # Now try empty data (no server request made)
-        empty = cStringIO.StringIO("")
+        empty = io.StringIO("")
         data = timestamper.TimestamperRate(empty, start, 120)
         result = client.stream_insert("/newton/prep", data)
         eq_(result, None)

         # It's OK to insert an empty interval
-        client.http.put("stream/insert", "", { "path": "/newton/prep",
+        client.http.put("stream/insert", b"", { "path": "/newton/prep",
                                                 "start": 1, "end": 2 })
         eq_(list(client.stream_intervals("/newton/prep")), [[1, 2]])
         client.stream_remove("/newton/prep")
         eq_(list(client.stream_intervals("/newton/prep")), [])

         # Timestamps can be negative too
-        client.http.put("stream/insert", "", { "path": "/newton/prep",
+        client.http.put("stream/insert", b"", { "path": "/newton/prep",
                                                 "start": -2, "end": -1 })
         eq_(list(client.stream_intervals("/newton/prep")), [[-2, -1]])
         client.stream_remove("/newton/prep")
         eq_(list(client.stream_intervals("/newton/prep")), [])

         # Intervals that end at zero shouldn't be any different
-        client.http.put("stream/insert", "", { "path": "/newton/prep",
+        client.http.put("stream/insert", b"", { "path": "/newton/prep",
                                                 "start": -1, "end": 0 })
         eq_(list(client.stream_intervals("/newton/prep")), [[-1, 0]])
         client.stream_remove("/newton/prep")
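A recurring theme in these hunks: nilmdb-2.0 stores timestamps as integer microseconds rather than floating-point Unix seconds, which is why constants like start + 119.999777 become start + 119999777 below. The conversion the tests rely on mirrors nilmdb.utils.time.unix_to_timestamp; the round-to-nearest behavior shown here is an assumption consistent with the rounding tests later in this file:

    def unix_to_timestamp(seconds):
        """Convert floating-point Unix seconds to integer microseconds."""
        return int(round(seconds * 1e6))

    print(unix_to_timestamp(1332511200.0))   # 1332511200000000
    print(unix_to_timestamp(119.999777))     # 119999777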
@@ -234,48 +278,86 @@ class TestClient(object):

         # Try forcing a server request with equal start and end
         with assert_raises(ClientError) as e:
-            client.http.put("stream/insert", "", { "path": "/newton/prep",
+            client.http.put("stream/insert", b"", { "path": "/newton/prep",
                                                     "start": 0, "end": 0 })
         in_("400 Bad Request", str(e.exception))
         in_("start must precede end", str(e.exception))

+        # Invalid times in HTTP request
+        with assert_raises(ClientError) as e:
+            client.http.put("stream/insert", b"", { "path": "/newton/prep",
+                                                    "start": "asdf", "end": 0 })
+        in_("400 Bad Request", str(e.exception))
+        in_("invalid start", str(e.exception))
+
+        with assert_raises(ClientError) as e:
+            client.http.put("stream/insert", b"", { "path": "/newton/prep",
+                                                    "start": 0, "end": "asdf" })
+        in_("400 Bad Request", str(e.exception))
+        in_("invalid end", str(e.exception))
+
+        # Good content type
+        with assert_raises(ClientError) as e:
+            client.http.put("stream/insert", b"",
+                            { "path": "xxxx", "start": 0, "end": 1,
+                              "binary": 1 })
+        in_("No such stream", str(e.exception))
+
+        # Bad content type
+        with assert_raises(ClientError) as e:
+            client.http.put("stream/insert", b"",
+                            { "path": "xxxx", "start": 0, "end": 1,
+                              "binary": 1 },
+                            content_type="text/plain; charset=utf-8")
+        in_("Content type must be application/octet-stream", str(e.exception))
+
         # Specify start/end (starts too late)
         data = timestamper.TimestamperRate(testfile, start, 120)
         with assert_raises(ClientError) as e:
             result = client.stream_insert("/newton/prep", data,
-                                          start + 5, start + 120)
+                                          start + 5000000, start + 120000000)
         in_("400 Bad Request", str(e.exception))
-        in_("Data timestamp 1332511200.000000 < start time 1332511205.000000",
+        in_("Data timestamp 1332511200000000 < start time 1332511205000000",
             str(e.exception))

         # Specify start/end (ends too early)
         data = timestamper.TimestamperRate(testfile, start, 120)
         with assert_raises(ClientError) as e:
             result = client.stream_insert("/newton/prep", data,
-                                          start, start + 1)
+                                          start, start + 1000000)
         in_("400 Bad Request", str(e.exception))
         # Client chunks the input, so the exact timestamp here might change
         # if the chunk positions change.
-        assert(re.search("Data timestamp 13325[0-9]+\.[0-9]+ "
-                         ">= end time 1332511201.000000", str(e.exception))
+        assert(re.search("Data timestamp 13325[0-9]+ "
+                         ">= end time 1332511201000000", str(e.exception))
               is not None)

+        def check_data():
+            # Verify the intervals.  Should be just one, even if the data
+            # was inserted in chunks, due to nilmdb interval concatenation.
+            intervals = list(client.stream_intervals("/newton/prep"))
+            eq_(intervals, [[start, start + 119999777]])
+
+            # Try some overlapping data -- just insert it again
+            data = timestamper.TimestamperRate(testfile, start, 120)
+            with assert_raises(ClientError) as e:
+                result = client.stream_insert("/newton/prep", data)
+            in_("400 Bad Request", str(e.exception))
+            in_("verlap", str(e.exception))
+
         # Now do the real load
         data = timestamper.TimestamperRate(testfile, start, 120)
         result = client.stream_insert("/newton/prep", data,
-                                      start, start + 119.999777)
+                                      start, start + 119999777)
+        check_data()

-        # Verify the intervals.  Should be just one, even if the data
-        # was inserted in chunks, due to nilmdb interval concatenation.
-        intervals = list(client.stream_intervals("/newton/prep"))
-        eq_(intervals, [[start, start + 119.999777]])
-
-        # Try some overlapping data -- just insert it again
+        # Try inserting directly-passed data
+        client.stream_remove("/newton/prep", start, start + 119999777)
         data = timestamper.TimestamperRate(testfile, start, 120)
-        with assert_raises(ClientError) as e:
-            result = client.stream_insert("/newton/prep", data)
-        in_("400 Bad Request", str(e.exception))
-        in_("verlap", str(e.exception))
+        data_bytes = b''.join(data)
+        result = client.stream_insert("/newton/prep", data_bytes,
+                                      start, start + 119999777)
+        check_data()

         nilmdb.client.client.StreamInserter._max_data = old_max_data
         client.close()
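TimestamperRate, used throughout this test, wraps a file of raw rows and prepends timestamps at a fixed rate (120 Hz here), which is how an untimestamped prep capture becomes insertable data. A rough sketch of the idea under the integer-microsecond convention — nilmdb.utils.timestamper's real interface takes a filename or file object, so this generator is an assumption-laden simplification:

    def timestamp_rate(lines, start, rate):
        """Yield b"<timestamp> <row>" for each input row, spaced at `rate` Hz.
        `start` is an integer-microsecond timestamp."""
        for i, line in enumerate(lines):
            ts = start + (i * 1000000) // rate
            yield b"%d %s" % (ts, line)

    rows = list(timestamp_rate([b"1 2\n", b"3 4\n"], 1332511200000000, 120))
    # rows[1] starts with b"1332511200008333 " -- 1/120 s after the first row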
@@ -284,15 +366,37 @@ class TestClient(object):
         # Misc tests for extract and remove.  Most of them are in test_cmdline.
         client = nilmdb.client.Client(url = testurl)

-        for x in client.stream_extract("/newton/prep", 999123, 999124):
+        for x in client.stream_extract("/newton/prep",
+                                       999123000000, 999124000000):
             raise AssertionError("shouldn't be any data for this request")

         with assert_raises(ClientError) as e:
-            client.stream_remove("/newton/prep", 123, 120)
+            client.stream_remove("/newton/prep", 123000000, 120000000)

         # Test count
         eq_(client.stream_count("/newton/prep"), 14400)

+        # Test binary output
+        with assert_raises(ClientError) as e:
+            list(client.stream_extract("/newton/prep",
+                                       markup = True, binary = True))
+        with assert_raises(ClientError) as e:
+            list(client.stream_extract("/newton/prep",
+                                       count = True, binary = True))
+        data = b"".join(client.stream_extract("/newton/prep", binary = True))
+        # Quick check using struct
+        unpacker = struct.Struct("<qffffffff")
+        out = []
+        for i in range(14400):
+            out.append(unpacker.unpack_from(data, i * unpacker.size))
+        eq_(out[0], (1332511200000000, 266568.0, 224029.0, 5161.39990234375,
+                     2525.169921875, 8350.83984375, 3724.699951171875,
+                     1355.3399658203125, 2039.0))
+
+        # Just get some coverage
+        with assert_raises(ClientError) as e:
+            client.http.post("/stream/remove", { "path": "/none" })
+
         client.close()

     def test_client_06_generators(self):
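The binary-extract check above relies on the on-the-wire layout for a float32_8 stream: a little-endian int64 timestamp followed by eight float32 values. Verifying the arithmetic with the same struct format shows why the loop indexes rows at 40-byte strides:

    import struct

    unpacker = struct.Struct("<qffffffff")  # int64 timestamp + 8 x float32
    print(unpacker.size)  # 40 == 8 + 8*4, so row i starts at offset 40*i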
@@ -301,28 +405,22 @@ class TestClient(object):
         client = nilmdb.client.Client(url = testurl)

         # Trigger a client error in generator
-        start = datetime_tz.datetime_tz.smartparse("20120323T2000")
-        end = datetime_tz.datetime_tz.smartparse("20120323T1000")
+        start = nilmdb.utils.time.parse_time("20120323T2000")
+        end = nilmdb.utils.time.parse_time("20120323T1000")
         for function in [ client.stream_intervals, client.stream_extract ]:
             with assert_raises(ClientError) as e:
-                function("/newton/prep",
-                         start.totimestamp(),
-                         end.totimestamp()).next()
+                next(function("/newton/prep", start, end))
             in_("400 Bad Request", str(e.exception))
             in_("start must precede end", str(e.exception))

         # Trigger a curl error in generator
         with assert_raises(ServerError) as e:
-            client.http.get_gen("http://nosuchurl/").next()
-
-        # Trigger a curl error in generator
-        with assert_raises(ServerError) as e:
-            client.http.get_gen("http://nosuchurl/").next()
+            next(client.http.get_gen("http://nosuchurl.example.com./"))

         # Check 404 for missing streams
         for function in [ client.stream_intervals, client.stream_extract ]:
             with assert_raises(ClientError) as e:
-                function("/no/such/stream").next()
+                next(function("/no/such/stream"))
             in_("404 Not Found", str(e.exception))
             in_("No such stream", str(e.exception))

@@ -343,7 +441,7 @@ class TestClient(object):

         def headers():
             h = ""
-            for (k, v) in http._last_response.headers.items():
+            for (k, v) in list(http._last_response.headers.items()):
                 h += k + ": " + v + "\n"
             return h.lower()
@@ -357,16 +455,38 @@ class TestClient(object):
                                  headers())

         # Extract
-        x = http.get("stream/extract",
-                     { "path": "/newton/prep",
-                       "start": "123",
-                       "end": "124" })
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124" })
         if "transfer-encoding: chunked" not in headers():
             warnings.warn("Non-chunked HTTP response for /stream/extract")
         if "content-type: text/plain;charset=utf-8" not in headers():
             raise AssertionError("/stream/extract is not text/plain:\n" +
                                  headers())

+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124",
+                                         "binary": "1" })
+        if "transfer-encoding: chunked" not in headers():
+            warnings.warn("Non-chunked HTTP response for /stream/extract")
+        if "content-type: application/octet-stream" not in headers():
+            raise AssertionError("/stream/extract is not binary:\n" +
+                                 headers())
+
+        # Make sure a binary of "0" is really off
+        x = http.get("stream/extract", { "path": "/newton/prep",
+                                         "start": "123", "end": "124",
+                                         "binary": "0" })
+        if "content-type: application/octet-stream" in headers():
+            raise AssertionError("/stream/extract is not text:\n" +
+                                 headers())
+
+        # Invalid parameters
+        with assert_raises(ClientError) as e:
+            x = http.get("stream/extract", { "path": "/newton/prep",
+                                             "start": "123", "end": "124",
+                                             "binary": "asdfasfd" })
+        in_("can't parse parameter", str(e.exception))
+
         client.close()

     def test_client_08_unicode(self):
@@ -377,14 +497,15 @@ class TestClient(object):

         # Delete streams that exist
         for stream in client.stream_list():
+            client.stream_remove(stream[0])
             client.stream_destroy(stream[0])

         # Database is empty
         eq_(client.stream_list(), [])

         # Create Unicode stream, match it
-        raw = [ u"/düsseldorf/raw", u"uint16_6" ]
-        prep = [ u"/düsseldorf/prep", u"uint16_6" ]
+        raw = [ "/düsseldorf/raw", "uint16_6" ]
+        prep = [ "/düsseldorf/prep", "uint16_6" ]
         client.stream_create(*raw)
         eq_(client.stream_list(), [raw])
         eq_(client.stream_list(layout=raw[1]), [raw])

@@ -395,10 +516,10 @@ class TestClient(object):
         # Set / get metadata with Unicode keys and values
         eq_(client.stream_get_metadata(raw[0]), {})
         eq_(client.stream_get_metadata(prep[0]), {})
-        meta1 = { u"alpha": u"α",
-                  u"β": u"beta" }
-        meta2 = { u"alpha": u"α" }
-        meta3 = { u"β": u"beta" }
+        meta1 = { "alpha": "α",
+                  "β": "beta" }
+        meta2 = { "alpha": "α" }
+        meta3 = { "β": "beta" }
         client.stream_set_metadata(prep[0], meta1)
         client.stream_update_metadata(prep[0], {})
         client.stream_update_metadata(raw[0], meta2)
@@ -442,72 +563,86 @@ class TestClient(object):
         # override _max_data to trigger frequent server updates
         ctx._max_data = 15

-        ctx.insert("100 1\n")
+        ctx.insert(b"1000 1\n")

-        ctx.insert("101 ")
-        ctx.insert("1\n102 1")
-        ctx.insert("")
-        ctx.insert("\n103 1\n")
+        ctx.insert(b"1010 ")
+        ctx.insert(b"1\n1020 1")
+        ctx.insert(b"")
+        ctx.insert(b"\n1030 1\n")

-        ctx.insert("104 1\n")
-        ctx.insert("# hello\n")
-        ctx.insert(" # hello\n")
-        ctx.insert(" 105 1\n")
+        ctx.insert(b"1040 1\n")
+        ctx.insert(b"# hello\n")
+        ctx.insert(b" # hello\n")
+        ctx.insert(b" 1050 1\n")
         ctx.finalize()

-        ctx.insert("106 1\n")
-        ctx.update_end(106.5)
+        ctx.insert(b"1070 1\n")
+        ctx.update_end(1080)
         ctx.finalize()
-        ctx.update_start(106.8)
-        ctx.insert("107 1\n")
-        ctx.insert("108 1\n")
-        ctx.insert("109 1\n")
-        ctx.insert("110 1\n")
-        ctx.insert("111 1\n")
-        ctx.update_end(113)
-        ctx.insert("112 1\n")
-        ctx.update_end(114)
-        ctx.insert("113 1\n")
-        ctx.update_end(115)
-        ctx.insert("114 1" +
-                   " # this is super long" * 100 +
-                   "\n")
+        ctx.update_start(1090)
+        ctx.insert(b"1100 1\n")
+        ctx.insert(b"1110 1\n")
+        ctx.send()
+        ctx.insert(b"1120 1\n")
+        ctx.insert(b"1130 1\n")
+        ctx.insert(b"1140 1\n")
+        ctx.update_end(1160)
+        ctx.insert(b"1150 1\n")
+        ctx.update_end(1170)
+        ctx.insert(b"1160 1\n")
+        ctx.update_end(1180)
+        ctx.insert(b"1170 1" +
+                   b" # this is super long" * 100 +
+                   b"\n")
         ctx.finalize()
-        ctx.insert("# this is super long" * 100)
+        ctx.insert(b"# this is super long" * 100)
+
+        # override _max_data_after_send to trigger ValueError on a
+        # long nonterminated line
+        ctx._max_data_after_send = 1000
+        with assert_raises(ValueError):
+            ctx.insert(b"# this is super long" * 100)

         with assert_raises(ClientError):
-            with client.stream_insert_context("/context/test", 100, 200) as ctx:
-                ctx.insert("115 1\n")
+            with client.stream_insert_context("/context/test",
+                                              1000, 2000) as ctx:
+                ctx.insert(b"1180 1\n")

         with assert_raises(ClientError):
-            with client.stream_insert_context("/context/test", 200, 300) as ctx:
-                ctx.insert("115 1\n")
+            with client.stream_insert_context("/context/test",
+                                              2000, 3000) as ctx:
+                ctx.insert(b"1180 1\n")

         with assert_raises(ClientError):
             with client.stream_insert_context("/context/test") as ctx:
-                ctx.insert("bogus data\n")
+                ctx.insert(b"bogus data\n")

-        with client.stream_insert_context("/context/test", 200, 300) as ctx:
+        with client.stream_insert_context("/context/test", 2000, 3000) as ctx:
             # make sure our override wasn't permanent
             ne_(ctx._max_data, 15)
-            ctx.insert("225 1\n")
+            ctx.insert(b"2250 1\n")
             ctx.finalize()

         with assert_raises(ClientError):
-            with client.stream_insert_context("/context/test", 300, 400) as ctx:
-                ctx.insert("301 1\n")
-                ctx.insert("302 2\n")
-                ctx.insert("303 3\n")
-                ctx.insert("304 4\n")
-                ctx.insert("304 4\n") # non-monotonic after a few lines
+            with client.stream_insert_context("/context/test",
+                                              3000, 4000) as ctx:
+                ctx.insert(b"3010 1\n")
+                ctx.insert(b"3020 2\n")
+                ctx.insert(b"3030 3\n")
+                ctx.insert(b"3040 4\n")
+                ctx.insert(b"3040 4\n") # non-monotonic after a few lines
                 ctx.finalize()

         eq_(list(client.stream_intervals("/context/test")),
-            [ [ 100, 105.000001 ],
-              [ 106, 106.5 ],
-              [ 106.8, 115 ],
-              [ 200, 300 ] ])
+            [ [ 1000, 1051 ],
+              [ 1070, 1080 ],
+              [ 1090, 1180 ],
+              [ 2000, 3000 ] ])

+        # destroy stream (try without removing data first)
+        with assert_raises(ClientError):
+            client.stream_destroy("/context/test")
+        client.stream_remove("/context/test")
         client.stream_destroy("/context/test")
         client.close()
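The insert-context tests above read more easily with the basic lifecycle in mind: the context manager buffers ASCII rows, send() flushes to the server without closing the current interval, and finalize() closes the interval so the next insert starts a new one. A compact usage sketch, assuming a nilmdb server is running at the given URL (path and values are illustrative):

    import nilmdb.client

    client = nilmdb.client.Client("http://localhost:32180/")
    client.stream_create("/example/stream", "uint16_1")
    with client.stream_insert_context("/example/stream", 1000, 2000) as ctx:
        ctx.insert(b"1000 1\n")   # buffered locally
        ctx.send()                # flush to the server; interval stays open
        ctx.insert(b"1010 1\n")
        ctx.finalize()            # close the current interval
        ctx.insert(b"1500 1\n")   # starts a new interval
        # leaving the block runs one last implicit finalize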
@@ -530,9 +665,9 @@ class TestClient(object):
         # Insert a region with just a few points
         with client.stream_insert_context("/empty/test") as ctx:
             ctx.update_start(100)
-            ctx.insert("140 1\n")
-            ctx.insert("150 1\n")
-            ctx.insert("160 1\n")
+            ctx.insert(b"140 1\n")
+            ctx.insert(b"150 1\n")
+            ctx.insert(b"160 1\n")
             ctx.update_end(200)
             ctx.finalize()

@@ -545,7 +680,7 @@ class TestClient(object):

         # Try also creating a completely empty interval from scratch,
         # in a few different ways.
-        client.stream_insert("/empty/test", "", 300, 350)
+        client.stream_insert("/empty/test", b"", 300, 350)
         client.stream_insert("/empty/test", [], 400, 450)
         with client.stream_insert_context("/empty/test", 500, 550):
             pass

@@ -561,15 +696,19 @@ class TestClient(object):
         with client.stream_insert_context("/empty/test", end = 950):
             pass

+        # Equal start and end is OK as long as there's no data
+        with client.stream_insert_context("/empty/test", start=9, end=9):
+            pass
+
         # Try various things that might cause problems
-        with client.stream_insert_context("/empty/test", 1000, 1050):
+        with client.stream_insert_context("/empty/test", 1000, 1050) as ctx:
             ctx.finalize() # inserts [1000, 1050]
             ctx.finalize() # nothing
             ctx.finalize() # nothing
-            ctx.insert("1100 1\n")
-            ctx.finalize() # inserts [1100, 1100.000001]
+            ctx.insert(b"1100 1\n")
+            ctx.finalize() # inserts [1100, 1101]
             ctx.update_start(1199)
-            ctx.insert("1200 1\n")
+            ctx.insert(b"1200 1\n")
             ctx.update_end(1250)
             ctx.finalize() # inserts [1199, 1250]
             ctx.update_start(1299)

@@ -577,14 +716,14 @@ class TestClient(object):
             ctx.update_end(1350)
             ctx.finalize() # nothing
             ctx.update_start(1400)
-            ctx.insert("# nothing!\n")
+            ctx.insert(b"# nothing!\n")
             ctx.update_end(1450)
             ctx.finalize()
             ctx.update_start(1500)
-            ctx.insert("# nothing!")
+            ctx.insert(b"# nothing!")
             ctx.update_end(1550)
             ctx.finalize()
-            ctx.insert("# nothing!\n" * 10)
+            ctx.insert(b"# nothing!\n" * 10)
             ctx.finalize()
             # implicit last finalize inserts [1400, 1450]
@@ -595,47 +734,51 @@ class TestClient(object):
               (0, [400, 450]),
               (0, [500, 550]),
               (0, [1000, 1050]),
-              (1, [1100, 1100.000001]),
+              (1, [1100, 1101]),
              (1, [1199, 1250]),
              (0, [1400, 1450]),
              (0, [1500, 1550]),
              ])

         # Clean up
+        client.stream_remove("/empty/test")
         client.stream_destroy("/empty/test")
         client.close()

     def test_client_12_persistent(self):
-        # Check that connections are persistent when they should be.
-        # This is pretty hard to test; we have to poke deep into
-        # the Requests library.
+        # Check that connections are NOT persistent.  Rather than trying
+        # to verify this at the TCP level, just make sure that the response
+        # contained a "Connection: close" header.
         with nilmdb.client.Client(url = testurl) as c:
-            def connections():
-                try:
-                    poolmanager = c.http._last_response.connection.poolmanager
-                    pool = poolmanager.pools[('http','localhost',32180)]
-                    return (pool.num_connections, pool.num_requests)
-                except:
-                    raise SkipTest("can't get connection info")
-
-            # First request makes a connection
             c.stream_create("/persist/test", "uint16_1")
-            eq_(connections(), (1, 1))
-
-            # Non-generator
-            c.stream_list("/persist/test")
-            eq_(connections(), (1, 2))
-            c.stream_list("/persist/test")
-            eq_(connections(), (1, 3))
-
-            # Generators
-            for x in c.stream_intervals("/persist/test"):
-                pass
-            eq_(connections(), (1, 4))
-            for x in c.stream_intervals("/persist/test"):
-                pass
-            eq_(connections(), (1, 5))
-
-            # Clean up
+            eq_(c.http._last_response.headers["Connection"], "close")
             c.stream_destroy("/persist/test")
-            eq_(connections(), (1, 6))
+            eq_(c.http._last_response.headers["Connection"], "close")
+
+    def test_client_13_timestamp_rounding(self):
+        # Test potentially bad timestamps (due to floating point
+        # roundoff etc).  The server will round floating point values
+        # to the nearest int.
+        client = nilmdb.client.Client(testurl)
+
+        client.stream_create("/rounding/test", "uint16_1")
+        with client.stream_insert_context("/rounding/test",
+                                          100000000, 200000000.1) as ctx:
+            ctx.insert(b"100000000.1 1\n")
+            ctx.insert(b"150000000.00003 1\n")
+            ctx.insert(b"199999999.4 1\n")
+        eq_(list(client.stream_intervals("/rounding/test")),
+            [ [ 100000000, 200000000 ] ])
+
+        with assert_raises(ClientError):
+            with client.stream_insert_context("/rounding/test",
+                                              200000000, 300000000) as ctx:
+                ctx.insert(b"200000000 1\n")
+                ctx.insert(b"250000000 1\n")
+                # Server will round this and give an error on finalize()
+                ctx.insert(b"299999999.99 1\n")
+
+        client.stream_remove("/rounding/test")
+        client.stream_destroy("/rounding/test")
+        client.close()
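The new rounding test depends on the server rounding float timestamps to the nearest integer microsecond, so b"299999999.99 1\n" collides with the interval end while b"199999999.4 1\n" stays inside its interval. The arithmetic, checked standalone:

    print(round(299999999.99))  # 300000000 -- equals the interval end,
                                # so the insert fails on finalize()
    print(round(199999999.4))   # 199999999 -- still inside [1e8, 2e8)

The remaining hunks cover the command-line test suite (class TestCmdline), starting with the same Python 3 import cleanups.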
@@ -4,7 +4,7 @@ import nilmdb.server

 from nilmdb.utils.printf import *
 import nilmdb.cmdline
-from nilmdb.utils import datetime_tz
+import datetime_tz

 import unittest
 from nose.tools import *

@@ -13,19 +13,25 @@ import itertools
 import os
 import re
 import sys
-import StringIO
+import io
 import shlex
+import warnings

 from testutil.helpers import *

 testdb = "tests/cmdline-testdb"

-def server_start(max_results = None, bulkdata_args = {}):
+def server_start(max_results = None,
+                 max_removals = None,
+                 max_int_removals = None,
+                 bulkdata_args = {}):
     global test_server, test_db
     # Start web app on a custom port
     test_db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(
         testdb,
         max_results = max_results,
+        max_removals = max_removals,
+        max_int_removals = max_int_removals,
         bulkdata_args = bulkdata_args)
     test_server = nilmdb.server.Server(test_db, host = "127.0.0.1",
                                        port = 32180, stoppable = False,
@@ -48,17 +54,11 @@ def setup_module():
 def teardown_module():
     server_stop()

-# Add an encoding property to StringIO so Python will convert Unicode
-# properly when writing or reading.
-class UTF8StringIO(StringIO.StringIO):
-    encoding = 'utf-8'
-
 class TestCmdline(object):

     def run(self, arg_string, infile=None, outfile=None):
         """Run a cmdline client with the specified argument string,
-        passing the given input.  Returns a tuple with the output and
-        exit code"""
+        passing the given input.  Save the output and exit code."""
         # printf("TZ=UTC ./nilmtool.py %s\n", arg_string)
         os.environ['NILMDB_URL'] = "http://localhost:32180/"
         class stdio_wrapper:

@@ -71,23 +71,29 @@ class TestCmdline(object):
                 ( sys.stdin, sys.stdout, sys.stderr ) = self.saved
         # Empty input if none provided
         if infile is None:
-            infile = UTF8StringIO("")
+            infile = io.TextIOWrapper(io.BytesIO(b""))
         # Capture stderr
-        errfile = UTF8StringIO()
+        errfile = io.TextIOWrapper(io.BytesIO())
         if outfile is None:
             # If no output file, capture stdout with stderr
             outfile = errfile
         with stdio_wrapper(infile, outfile, errfile) as s:
             try:
-                # shlex doesn't support Unicode very well.  Encode the
-                # string as UTF-8 explicitly before splitting.
-                args = shlex.split(arg_string.encode('utf-8'))
+                args = shlex.split(arg_string)
                 nilmdb.cmdline.Cmdline(args).run()
                 sys.exit(0)
             except SystemExit as e:
                 exitcode = e.code
-        captured = outfile.getvalue()
-        self.captured = captured
+
+        # Capture raw binary output, and also try to decode a Unicode
+        # string copy.
+        self.captured_binary = outfile.buffer.getvalue()
+        try:
+            outfile.seek(0)
+            self.captured = outfile.read()
+        except UnicodeDecodeError:
+            self.captured = None
+
         self.exitcode = exitcode

     def ok(self, arg_string, infile = None):
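The UTF8StringIO replacement works because io.TextIOWrapper layers text encoding over any binary buffer and keeps that buffer reachable via its .buffer attribute — which is what lets run() grab both decoded text and raw bytes of the same output. A standalone demonstration of the pattern:

    import io

    out = io.TextIOWrapper(io.BytesIO(), encoding="utf-8")
    out.write("düsseldorf\n")
    out.flush()
    print(out.buffer.getvalue())  # b'd\xc3\xbcsseldorf\n' -- raw bytes
    out.seek(0)
    print(out.read())             # 'düsseldorf\n' -- decoded text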
@@ -126,16 +132,16 @@ class TestCmdline(object):
         with open(file) as f:
             contents = f.read()
         if contents != self.captured:
-            print "--- reference file (first 1000 bytes):\n"
-            print contents[0:1000] + "\n"
-            print "--- captured data (first 1000 bytes):\n"
-            print self.captured[0:1000] + "\n"
-            zipped = itertools.izip_longest(contents, self.captured)
+            print("--- reference file (first 1000 bytes):\n")
+            print(contents[0:1000] + "\n")
+            print("--- captured data (first 1000 bytes):\n")
+            print(self.captured[0:1000] + "\n")
+            zipped = itertools.zip_longest(contents, self.captured)
             for (n, (a, b)) in enumerate(zipped):
                 if a != b:
-                    print "--- first difference is at offset", n
-                    print "--- reference:", repr(a)
-                    print "--- captured:", repr(b)
+                    print("--- first difference is at offset", n)
+                    print("--- reference:", repr(a))
+                    print("--- captured:", repr(b))
                     break
             raise AssertionError("captured data doesn't match " + file)
@@ -158,6 +164,12 @@ class TestCmdline(object):
         self.ok("--help")
         self.contain("usage:")

+        # help
+        self.ok("--version")
+        ver = self.captured
+        self.ok("list --version")
+        eq_(self.captured, ver)
+
         # fail for no args
         self.fail("")

@@ -215,9 +227,11 @@ class TestCmdline(object):
     def test_02_parsetime(self):
         os.environ['TZ'] = "America/New_York"
         test = datetime_tz.datetime_tz.now()
+        u2ts = nilmdb.utils.time.unix_to_timestamp
         parse_time = nilmdb.utils.time.parse_time
-        eq_(parse_time(str(test)), test)
-        test = datetime_tz.datetime_tz.smartparse("20120405 1400-0400")
+        eq_(parse_time(str(test)), u2ts(test.totimestamp()))
+        test = u2ts(datetime_tz.datetime_tz.smartparse("20120405 1400-0400").
+                    totimestamp())
         eq_(parse_time("hi there 20120405 1400-0400 testing! 123"), test)
         eq_(parse_time("20120405 1800 UTC"), test)
         eq_(parse_time("20120405 1400-0400 UTC"), test)
@@ -227,6 +241,13 @@ class TestCmdline(object):
         x = parse_time("now")
         eq_(parse_time("snapshot-20120405-140000.raw.gz"), test)
         eq_(parse_time("prep-20120405T1400"), test)
+        eq_(parse_time("1333648800.0"), test)
+        eq_(parse_time("1333648800000000"), test)
+        eq_(parse_time("@1333648800000000"), test)
+        eq_(parse_time("min"), nilmdb.utils.time.min_timestamp)
+        eq_(parse_time("max"), nilmdb.utils.time.max_timestamp)
+        with assert_raises(ValueError):
+            parse_time("@hashtag12345")

     def test_03_info(self):
         self.ok("info")
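The new parse_time cases accept raw timestamps in several spellings: an @ prefix marks an exact integer-microsecond value, bare numbers are interpreted as seconds or microseconds depending on magnitude, and "min"/"max" map to the extreme timestamps. A sketch of just the numeric branch — the magnitude heuristic and extreme values here are assumptions, not nilmdb's exact rules:

    def parse_numeric_time(s):
        if s == "min": return -(2**63)       # placeholder extreme values
        if s == "max": return 2**63 - 1
        if s.startswith("@"):
            return int(s[1:])                # ValueError for "@hashtag12345"
        val = float(s)
        # assume small values are seconds, large ones already microseconds
        return int(round(val * 1e6)) if abs(val) < 1e12 else int(round(val))

    print(parse_numeric_time("1333648800.0"))      # 1333648800000000
    print(parse_numeric_time("1333648800000000"))  # 1333648800000000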
@@ -234,8 +255,10 @@ class TestCmdline(object):
         self.contain("Client version: " + nilmdb.__version__)
         self.contain("Server version: " + test_server.version)
         self.contain("Server database path")
-        self.contain("Server database size")
-        self.contain("Server database free space")
+        self.contain("Server disk space used by NilmDB")
+        self.contain("Server disk space used by other")
+        self.contain("Server disk space reserved")
+        self.contain("Server disk space free")

     def test_04_createlist(self):
         # Basic stream tests, like those in test_client.
@@ -250,6 +273,15 @@ class TestCmdline(object):

         self.fail("create /foo float32_8")
         self.contain("invalid path")
+        self.fail("create /newton/prep/ float32_8")
+        self.contain("invalid path")
+
+        self.fail("create /newton/_format/prep float32_8")
+        self.contain("path name is invalid")
+        self.fail("create /_format/newton/prep float32_8")
+        self.contain("path name is invalid")
+        self.fail("create /newton/prep/_format float32_8")
+        self.contain("path name is invalid")

         # Bad layout type
         self.fail("create /newton/prep NoSuchLayout")

@@ -263,6 +295,11 @@ class TestCmdline(object):
         self.ok("create /newton/zzz/rawnotch uint16_9")
         self.ok("create /newton/prep float32_8")
         self.ok("create /newton/raw uint16_6")
+        self.ok("create /newton/raw~decim-1234 uint16_6")
+
+        # Create a stream that already exists
+        self.fail("create /newton/raw uint16_6")
+        self.contain("stream already exists at this path")

         # Should not be able to create a stream with another stream as
         # its parent
@@ -274,40 +311,31 @@ class TestCmdline(object):
         self.fail("create /newton/zzz float32_8")
         self.contain("subdirs of this path already exist")

-        # Verify we got those 3 streams and they're returned in
+        # Verify we got those 4 streams and they're returned in
         # alphabetical order.
-        self.ok("list")
+        self.ok("list -l")
+        self.match("/newton/prep float32_8\n"
+                   "/newton/raw uint16_6\n"
+                   "/newton/raw~decim-1234 uint16_6\n"
+                   "/newton/zzz/rawnotch uint16_9\n")
+
+        # No decimated streams if -n specified
+        self.ok("list -n -l")
         self.match("/newton/prep float32_8\n"
                    "/newton/raw uint16_6\n"
                    "/newton/zzz/rawnotch uint16_9\n")

+        # Delete that decimated stream
+        self.ok("destroy /newton/raw~decim-1234")
+
         # Match just one type or one path.  Also check
         # that --path is optional
-        self.ok("list --path /newton/raw")
-        self.match("/newton/raw uint16_6\n")
-
-        self.ok("list /newton/raw")
-        self.match("/newton/raw uint16_6\n")
-
-        self.fail("list -p /newton/raw /newton/raw")
-        self.contain("too many paths")
-
-        self.ok("list --layout uint16_6")
+        self.ok("list --layout /newton/raw")
         self.match("/newton/raw uint16_6\n")

         # Wildcard matches
-        self.ok("list --layout uint16*")
-        self.match("/newton/raw uint16_6\n"
-                   "/newton/zzz/rawnotch uint16_9\n")
-
-        self.ok("list --path *zzz* --layout uint16*")
-        self.match("/newton/zzz/rawnotch uint16_9\n")
-
-        self.ok("list *zzz* --layout uint16*")
-        self.match("/newton/zzz/rawnotch uint16_9\n")
-
-        self.ok("list --path *zzz* --layout float32*")
-        self.match("")
+        self.ok("list *zzz*")
+        self.match("/newton/zzz/rawnotch\n")

         # reversed range
         self.fail("list /newton/prep --start 2020-01-01 --end 2000-01-01")
@@ -332,6 +360,12 @@ class TestCmdline(object):
         self.ok("metadata /newton/raw --update "
                 "v_scale=1.234")

+        # unicode
+        self.ok("metadata /newton/raw --set "
+                "a_𝓴𝓮𝔂=value a_key=𝓿𝓪𝓵𝓾𝓮 a_𝗸𝗲𝘆=𝘃𝗮𝗹𝘂𝗲")
+        self.ok("metadata /newton/raw --get")
+        self.match("a_key=𝓿𝓪𝓵𝓾𝓮\na_𝓴𝓮𝔂=value\na_𝗸𝗲𝘆=𝘃𝗮𝗹𝘂𝗲\n")
+
         # various parsing tests
         self.ok("metadata /newton/raw --update foo=")
         self.fail("metadata /newton/raw --update =bar")

@@ -345,6 +379,8 @@ class TestCmdline(object):
         self.contain("No stream at path")
         self.fail("metadata /newton/nosuchstream --set foo=bar")
         self.contain("No stream at path")
+        self.fail("metadata /newton/nosuchstream --delete")
+        self.contain("No stream at path")

         self.ok("metadata /newton/prep")
         self.match("description=The Data\nv_scale=1.234\n")
@@ -370,6 +406,19 @@ class TestCmdline(object):
         self.fail("metadata /newton/nosuchpath")
         self.contain("No stream at path /newton/nosuchpath")

+        self.ok("metadata /newton/prep --delete")
+        self.ok("metadata /newton/prep --get")
+        self.match("")
+        self.ok("metadata /newton/prep --set "
+                "'description=The Data' "
+                "v_scale=1.234")
+        self.ok("metadata /newton/prep --delete v_scale")
+        self.ok("metadata /newton/prep --get")
+        self.match("description=The Data\n")
+        self.ok("metadata /newton/prep --set description=")
+        self.ok("metadata /newton/prep --get")
+        self.match("")
+
     def test_06_insert(self):
         self.ok("insert --help")
@@ -394,7 +443,7 @@ class TestCmdline(object):
|
|||||||
self.fail("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
|
self.fail("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
|
||||||
input)
|
input)
|
||||||
self.contain("error parsing input data")
|
self.contain("error parsing input data")
|
||||||
self.contain("line 7:")
|
self.contain("line 7")
|
||||||
self.contain("timestamp is not monotonically increasing")
|
self.contain("timestamp is not monotonically increasing")
|
||||||
|
|
||||||
# insert pre-timestamped data, from stdin
|
# insert pre-timestamped data, from stdin
|
||||||
@@ -436,41 +485,61 @@ class TestCmdline(object):
self.fail("insert -t -r 120 -f /newton/raw "
"tests/data/prep-20120323T1004")
self.contain("error parsing input data")
+self.contain("can't parse value")

+# too few rows per line
+self.ok("create /insert/test float32_20")
+self.fail("insert -t -r 120 -f /insert/test "
+"tests/data/prep-20120323T1004")
+self.contain("error parsing input data")
+self.contain("wrong number of values")
+self.ok("destroy /insert/test")
+
# empty data does nothing
self.ok("insert -t -r 120 --start '03/23/2012 06:05:00' /newton/prep "
"/dev/null")

+# --quiet option
+self.ok("insert --quiet -t -r 120 -s @0 /newton/prep /dev/null")
+self.match("")
+
# bad start time
self.fail("insert -t -r 120 --start 'whatever' /newton/prep /dev/null")

+# Test negative times
+self.ok("insert --start @-10000000000 --end @1000000001 /newton/prep"
+" tests/data/timestamped")
+self.ok("extract -c /newton/prep --start min --end @1000000001")
+self.match("8\n")
+self.ok("remove /newton/prep --start min --end @1000000001")
+
def test_07_detail_extended(self):
# Just count the number of lines, it's probably fine
self.ok("list --detail")
lines_(self.captured, 8)

-self.ok("list --detail --path *prep")
+self.ok("list --detail *prep")
lines_(self.captured, 4)

-self.ok("list --detail --path *prep --start='23 Mar 2012 10:02'")
+self.ok("list --detail *prep --start='23 Mar 2012 10:02'")
lines_(self.captured, 3)

-self.ok("list --detail --path *prep --start='23 Mar 2012 10:05'")
+self.ok("list --detail *prep --start='23 Mar 2012 10:05'")
lines_(self.captured, 2)

-self.ok("list --detail --path *prep --start='23 Mar 2012 10:05:15'")
+self.ok("list --detail *prep --start='23 Mar 2012 10:05:15'")
lines_(self.captured, 2)
self.contain("10:05:15.000")

-self.ok("list --detail --path *prep --start='23 Mar 2012 10:05:15.50'")
+self.ok("list --detail *prep --start='23 Mar 2012 10:05:15.50'")
lines_(self.captured, 2)
self.contain("10:05:15.500")

-self.ok("list --detail --path *prep --start='23 Mar 2012 19:05:15.50'")
+self.ok("list --detail *prep --start='23 Mar 2012 19:05:15.50'")
lines_(self.captured, 2)
self.contain("no intervals")

-self.ok("list --detail --path *prep --start='23 Mar 2012 10:05:15.50'"
+self.ok("list --detail *prep --start='23 Mar 2012 10:05:15.50'"
+ " --end='23 Mar 2012 10:05:15.51'")
lines_(self.captured, 2)
self.contain("10:05:15.500")
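A note on the '@' arguments above (-s @0, --start @-10000000000): the '@' prefix marks a raw timestamp, which in nilmdb-2.0 is integer microseconds since the Unix epoch (an inference from the integer timestamp values elsewhere in this diff). A wall-clock time converts like this:

    import datetime

    def raw_timestamp(dt):
        # '@' followed by integer microseconds since the epoch (assumed
        # convention, matching the values seen in these tests)
        return "@%d" % int(dt.timestamp() * 1000000)

    print(raw_timestamp(datetime.datetime(2012, 3, 23, 10, 4,
                                          tzinfo=datetime.timezone.utc)))
    # -> @1332497040000000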
@@ -479,17 +548,17 @@ class TestCmdline(object):
lines_(self.captured, 8)

# Verify the "raw timestamp" output
-self.ok("list --detail --path *prep --timestamp-raw "
+self.ok("list --detail *prep --timestamp-raw "
"--start='23 Mar 2012 10:05:15.50'")
lines_(self.captured, 2)
-self.contain("[ 1332497115.500000 -> 1332497160.000000 ]")
+self.contain("[ 1332497115500000 -> 1332497160000000 ]")

# bad time
-self.fail("list --detail --path *prep -T --start='9332497115.612'")
+self.fail("list --detail *prep -T --start='9332497115.612'")
# good time
-self.ok("list --detail --path *prep -T --start='1332497115.612'")
+self.ok("list --detail *prep -T --start='1332497115.612'")
lines_(self.captured, 2)
-self.contain("[ 1332497115.612000 -> 1332497160.000000 ]")
+self.contain("[ 1332497115612000 -> 1332497160000000 ]")

# Check --ext output
self.ok("list --ext")
@@ -497,7 +566,7 @@ class TestCmdline(object):

self.ok("list -E -T")
c = self.contain
-c("\n interval extents: 1332496800.000000 -> 1332497160.000000\n")
+c("\n interval extents: 1332496800000000 -> 1332497160000000\n")
c("\n total data: 43200 rows, 359.983336 seconds\n")
c("\n interval extents: (no data)\n")
c("\n total data: 0 rows, 0.000000 seconds\n")
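The hunks above capture the core nilmdb-2.0 representation change: raw timestamps go from float seconds (1332497115.500000) to integer microseconds (1332497115500000). Conversion is a factor of 10^6 either way:

    def us_from_seconds(sec):
        # float seconds (nilmdb-1.x style) -> integer microseconds (2.0 style)
        return round(sec * 1000000)

    def seconds_from_us(us):
        # integer microseconds -> float seconds, for display or comparison
        return us / 1000000

    assert us_from_seconds(1332497115.5) == 1332497115500000
    assert seconds_from_us(1332497160000000) == 1332497160.0

Integer microseconds avoid the precision loss that float seconds hit on large epoch values, which is presumably why the expected strings in these tests all changed.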
@@ -532,6 +601,13 @@ class TestCmdline(object):
exitcode = 2, require_error = False)
self.contain("no data")

+# unannotated empty extract is just empty, with an exit code of 2
+self.fail("extract /newton/prep " +
+"--start '23 Mar 2022 10:00:30' " +
+"--end '23 Mar 2022 10:00:31'",
+exitcode = 2, require_error = False)
+self.match("")
+
# but are ok if we're just counting results
self.ok("extract --count /newton/prep " +
"--start '23 Mar 2012 20:00:30' " +
@@ -542,6 +618,14 @@ class TestCmdline(object):
"--end '23 Mar 2012 20:00:30.000002'")
self.match("0\n")

+# Extract needs --start and --end
+self.fail("extract -a /newton/prep")
+self.contain("arguments are required")
+self.fail("extract -a /newton/prep --start 2000-01-01")
+self.contain("arguments are required")
+self.fail("extract -a /newton/prep --end 2000-01-01")
+self.contain("arguments are required")
+
# Check various dumps against stored copies of how they should appear
def test(file, start, end, extra=""):
self.ok("extract " + extra + " /newton/prep " +
@@ -563,13 +647,28 @@ class TestCmdline(object):
test(6, "10:00:30", "10:00:31", extra="-b")
test(7, "10:00:30", "10:00:30.999", extra="-a -T")
test(7, "10:00:30", "10:00:30.999", extra="-a --timestamp-raw")
+test(8, "10:01:59.9", "10:02:00.1", extra="--markup")
+test(8, "10:01:59.9", "10:02:00.1", extra="-m")

# all data put in by tests
-self.ok("extract -a /newton/prep --start 2000-01-01 --end 2020-01-01")
+self.ok("extract -a /newton/prep --start min --end max")
lines_(self.captured, 43204)
self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
self.match("43200\n")

+# test binary mode
+self.fail("extract -c -B /newton/prep -s min -e max")
+self.contain("binary cannot be combined")
+self.fail("extract -m -B /newton/prep -s min -e max")
+self.contain("binary cannot be combined")
+self.ok("extract -B /newton/prep -s min -e max")
+eq_(len(self.captured_binary), 43200 * (8 + 8*4))
+
+# markup for 3 intervals, plus extra markup lines whenever we had
+# a "restart" from the nilmdb.stream_extract function
+self.ok("extract -m /newton/prep --start 2000-01-01 --end 2020-01-01")
+lines_(self.captured, 43210)
+
def test_09_truncated(self):
# Test truncated responses by overriding the nilmdb max_results
server_stop()
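The binary-extract size check above, 43200 * (8 + 8*4), implies each row of a float32_8 stream is an 8-byte timestamp plus eight 4-byte floats. A sketch of unpacking such rows; the field order and little-endianness here are assumptions, since only the 40-byte row size is pinned down by the test:

    import struct

    # Assumed layout: int64 timestamp followed by eight float32 values.
    ROW = struct.Struct("<q8f")
    assert ROW.size == 8 + 8 * 4

    def unpack_rows(buf):
        # Yield (timestamp, v0..v7) tuples from a binary extract buffer.
        for off in range(0, len(buf), ROW.size):
            yield ROW.unpack_from(buf, off)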
@@ -584,7 +683,7 @@ class TestCmdline(object):

# Try nonexistent stream
self.fail("remove /no/such/foo --start 2000-01-01 --end 2020-01-01")
-self.contain("No stream at path")
+self.contain("no stream matched path")

# empty or backward ranges return errors
self.fail("remove /newton/prep --start 2020-01-01 --end 2000-01-01")
@@ -612,9 +711,14 @@ class TestCmdline(object):
"--start '23 Mar 2022 20:00:30' " +
"--end '23 Mar 2022 20:00:31'")
self.match("0\n")
+self.ok("remove -c /newton/prep /newton/pre* " +
+"--start '23 Mar 2022 20:00:30' " +
+"--end '23 Mar 2022 20:00:31'")
+self.match("Removing from /newton/prep\n0\n" +
+"Removing from /newton/prep\n0\n")

# Make sure we have the data we expect
-self.ok("list --detail /newton/prep")
+self.ok("list -l --detail /newton/prep")
self.match("/newton/prep float32_8\n" +
" [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
" -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
@@ -649,7 +753,7 @@ class TestCmdline(object):
self.match("24000\n")

# See the missing chunks in list output
-self.ok("list --detail /newton/prep")
+self.ok("list --layout --detail /newton/prep")
self.match("/newton/prep float32_8\n" +
" [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
" -> Fri, 23 Mar 2012 10:00:05.000000 +0000 ]\n"
@@ -663,33 +767,34 @@ class TestCmdline(object):
# Remove all data, verify it's missing
self.ok("remove /newton/prep --start 2000-01-01 --end 2020-01-01")
self.match("") # no count requested this time
-self.ok("list --detail /newton/prep")
+self.ok("list -l --detail /newton/prep")
self.match("/newton/prep float32_8\n" +
" (no intervals)\n")

# Reinsert some data, to verify that no overlaps with deleted
# data are reported
-os.environ['TZ'] = "UTC"
-self.ok("insert --timestamp -f --rate 120 /newton/prep "
-"tests/data/prep-20120323T1000")
-self.ok("insert -t --filename --rate 120 /newton/prep "
-"tests/data/prep-20120323T1002")
+for minute in ["0", "2"]:
+self.ok("insert --timestamp -f --rate 120 /newton/prep"
+" tests/data/prep-20120323T100" + minute)

def test_11_destroy(self):
# Delete records
self.ok("destroy --help")

self.fail("destroy")
-self.contain("too few arguments")
+self.contain("the following arguments are required")

self.fail("destroy /no/such/stream")
-self.contain("No stream at path")
+self.contain("no stream matched path")

+self.fail("destroy -R /no/such/stream")
+self.contain("no stream matched path")

self.fail("destroy asdfasdf")
-self.contain("No stream at path")
+self.contain("no stream matched path")

# From previous tests, we have:
-self.ok("list")
+self.ok("list -l")
self.match("/newton/prep float32_8\n"
"/newton/raw uint16_6\n"
"/newton/zzz/rawnotch uint16_9\n")
@@ -698,19 +803,26 @@ class TestCmdline(object):
self.ok("list --detail")
lines_(self.captured, 7)

-# Delete some
-self.ok("destroy /newton/prep")
-self.ok("list")
+# Fail to destroy because intervals still present
+self.fail("destroy /newton/prep")
+self.contain("all intervals must be removed")
+self.ok("list --detail")
+lines_(self.captured, 7)
+
+# Destroy for real
+self.ok("destroy -R /n*/prep")
+self.ok("list -l")
self.match("/newton/raw uint16_6\n"
"/newton/zzz/rawnotch uint16_9\n")

self.ok("destroy /newton/zzz/rawnotch")
-self.ok("list")
+self.ok("list -l")
self.match("/newton/raw uint16_6\n")

self.ok("destroy /newton/raw")
self.ok("create /newton/raw uint16_6")
-self.ok("destroy /newton/raw")
+# Specify --remove with no data
+self.ok("destroy --remove /newton/raw")
self.ok("list")
self.match("")

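The destroy hunks above show a behavior change in nilmdb-2.0: destroy now refuses to act while intervals remain, and destroy -R (--remove) removes the data first. A sketch of the resulting calling pattern; the do() helper is hypothetical, standing in for the test harness's self.ok():

    def destroy_stream(do, path, remove_data=False):
        if remove_data:
            # "destroy -R" bundles these two steps into one command
            do("remove %s --start min --end max" % path)
        do("destroy %s" % path)

    # destroy_stream(self.ok, "/newton/prep", remove_data=True)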
@@ -723,35 +835,37 @@ class TestCmdline(object):
self.ok("list")
self.contain(path)
# Make sure it was created empty
-self.ok("list --detail --path " + path)
+self.ok("list --detail " + path)
self.contain("(no intervals)")

def test_12_unicode(self):
# Unicode paths.
self.ok("destroy /newton/asdf/qwer")
-self.ok("destroy /newton/prep")
-self.ok("destroy /newton/raw")
+self.ok("destroy /newton/prep /newton/raw")
self.ok("destroy /newton/zzz")

-self.ok(u"create /düsseldorf/raw uint16_6")
-self.ok("list --detail")
-self.contain(u"/düsseldorf/raw uint16_6")
+self.ok("create /düsseldorf/raw uint16_6")
+self.ok("list -l --detail")
+self.contain("/düsseldorf/raw uint16_6")
self.contain("(no intervals)")

# Unicode metadata
-self.ok(u"metadata /düsseldorf/raw --set α=beta 'γ=δ'")
-self.ok(u"metadata /düsseldorf/raw --update 'α=β ε τ α'")
-self.ok(u"metadata /düsseldorf/raw")
-self.match(u"α=β ε τ α\nγ=δ\n")
+self.ok("metadata /düsseldorf/raw --set α=beta 'γ=δ'")
+self.ok("metadata /düsseldorf/raw --update 'α=β ε τ α'")
+self.ok("metadata /düsseldorf/raw")
+self.match("α=β ε τ α\nγ=δ\n")

-self.ok(u"destroy /düsseldorf/raw")
+self.ok("destroy /düsseldorf/raw")

def test_13_files(self):
# Test BulkData's ability to split into multiple files,
# by forcing the file size to be really small.
+# Also increase the initial nrows, so that start/end positions
+# in the database are very large (> 32 bit)
server_stop()
server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
-"files_per_dir" : 3 })
+"files_per_dir" : 3,
+"initial_nrows" : 2**40 })

# Fill data
self.ok("create /newton/prep float32_8")
@@ -785,7 +899,7 @@ class TestCmdline(object):

# Now recreate the data one more time and make sure there are
# fewer files.
-self.ok("destroy /newton/prep")
+self.ok("destroy --remove /newton/prep")
self.fail("destroy /newton/prep") # already destroyed
self.ok("create /newton/prep float32_8")
os.environ['TZ'] = "UTC"
@@ -796,15 +910,31 @@ class TestCmdline(object):
for (dirpath, dirnames, filenames) in os.walk(testdb):
nfiles += len(filenames)
lt_(nfiles, 50)
-self.ok("destroy /newton/prep") # destroy again
+self.ok("destroy -R /newton/prep") # destroy again

def test_14_remove_files(self):
+# Limit max_removals, to cover more functionality.
+server_stop()
+server_start(max_removals = 4321,
+bulkdata_args = { "file_size" : 920, # 23 rows per file
+"files_per_dir" : 3,
+"initial_nrows" : 2**40 })
+self.do_remove_files()
+self.ok("destroy -R /newton/prep") # destroy again
+
+def test_14b_remove_files_maxint(self):
+# Limit max_int_removals, to cover more functionality.
+server_stop()
+server_start(max_int_removals = 1,
+bulkdata_args = { "file_size" : 920, # 23 rows per file
+"files_per_dir" : 3,
+"initial_nrows" : 2**40 })
+self.do_remove_files()
+
+def do_remove_files(self):
# Test BulkData's ability to remove when data is split into
# multiple files. Should be a fairly comprehensive test of
# remove functionality.
-server_stop()
-server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
-"files_per_dir" : 3 })

# Insert data. Just for fun, insert out of order
self.ok("create /newton/prep float32_8")
@@ -818,7 +948,7 @@ class TestCmdline(object):
du_before = nilmdb.utils.diskusage.du(testdb)

# Make sure we have the data we expect
-self.ok("list --detail")
+self.ok("list -l --detail")
self.match("/newton/prep float32_8\n" +
" [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
" -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
@@ -854,7 +984,7 @@ class TestCmdline(object):
self.match("3600\n")

# See the missing chunks in list output
-self.ok("list --detail")
+self.ok("list -l --detail")
self.match("/newton/prep float32_8\n" +
" [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
" -> Fri, 23 Mar 2012 10:00:05.000000 +0000 ]\n"
@@ -893,3 +1023,223 @@ class TestCmdline(object):
# See if we can extract it all
self.ok("extract /newton/prep --start 2000-01-01 --end 2020-01-01")
lines_(self.captured, 15600)

+def test_15_intervals_diff(self):
+# Test "intervals" and "intervals --diff" command.
+os.environ['TZ'] = "UTC"
+
+self.ok("create /diff/1 uint8_1")
+self.match("")
+self.ok("intervals /diff/1")
+self.match("")
+self.ok("intervals /diff/1 --diff /diff/1")
+self.match("")
+self.ok("intervals --diff /diff/1 /diff/1")
+self.match("")
+self.fail("intervals /diff/2")
+self.fail("intervals /diff/1 -d /diff/2")
+
+self.ok("create /diff/2 uint8_1")
+self.ok("intervals -T /diff/1 -d /diff/2")
+self.match("")
+self.ok("insert -s 01-01-2000 -e 01-01-2001 /diff/1 /dev/null")
+
+self.ok("intervals /diff/1")
+self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
+"> Mon, 01 Jan 2001 00:00:00.000000 +0000 ]\n")
+
+self.ok("intervals /diff/1 -d /diff/2")
+self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
+"> Mon, 01 Jan 2001 00:00:00.000000 +0000 ]\n")
+
+self.ok("insert -s 01-01-2000 -e 01-01-2001 /diff/2 /dev/null")
+self.ok("intervals /diff/1 -d /diff/2")
+self.match("")
+
+self.ok("insert -s 01-01-2001 -e 01-01-2002 /diff/1 /dev/null")
+self.ok("insert -s 01-01-2002 -e 01-01-2003 /diff/2 /dev/null")
+self.ok("intervals /diff/1 -d /diff/2")
+self.match("[ Mon, 01 Jan 2001 00:00:00.000000 +0000 -"
+"> Tue, 01 Jan 2002 00:00:00.000000 +0000 ]\n")
+
+self.ok("insert -s 01-01-2004 -e 01-01-2005 /diff/1 /dev/null")
+self.ok("intervals /diff/1 -d /diff/2")
+self.match("[ Mon, 01 Jan 2001 00:00:00.000000 +0000 -"
+"> Tue, 01 Jan 2002 00:00:00.000000 +0000 ]\n"
+"[ Thu, 01 Jan 2004 00:00:00.000000 +0000 -"
+"> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")
+
+self.fail("intervals -s 01-01-2003 -e 01-01-2000 /diff/1 -d /diff/2")
+self.ok("intervals -s 01-01-2003 -e 01-01-2008 /diff/1 -d /diff/2")
+self.match("[ Thu, 01 Jan 2004 00:00:00.000000 +0000 -"
+"> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")
+
+# optimize
+self.ok("insert -s 01-01-2002 -e 01-01-2004 /diff/1 /dev/null")
+self.ok("intervals /diff/1")
+self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
+"> Thu, 01 Jan 2004 00:00:00.000000 +0000 ]\n"
+"[ Thu, 01 Jan 2004 00:00:00.000000 +0000 -"
+"> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")
+self.ok("intervals /diff/1 --optimize")
+self.ok("intervals /diff/1 -o")
+self.match("[ Sat, 01 Jan 2000 00:00:00.000000 +0000 -"
+"> Sat, 01 Jan 2005 00:00:00.000000 +0000 ]\n")
+
+self.ok("destroy -R /diff/1")
+self.ok("destroy -R /diff/2")
+
+def test_16_rename(self):
+# Test renaming. Force file size smaller so we get more files
+server_stop()
+recursive_unlink(testdb)
+server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
+"files_per_dir" : 3 })
+
+# Fill data
+self.ok("create /newton/prep float32_8")
+os.environ['TZ'] = "UTC"
+with open("tests/data/prep-20120323T1004-timestamped") as input:
+self.ok("insert -s 20120323T1004 -e 20120323T1006 /newton/prep",
+input)
+
+# Extract it
+self.ok("extract /newton/prep --start '2000-01-01' " +
+"--end '2012-03-23 10:04:01'")
+extract_before = self.captured
+
+def check_path(*components):
+# Verify the paths look right on disk
+seek = os.path.join(testdb, "data", *components)
+for (dirpath, dirnames, filenames) in os.walk(testdb):
+if "_format" in filenames:
+if dirpath == seek:
+break
+raise AssertionError("data also found at " + dirpath)
+else:
+raise AssertionError("data not found at " + seek)
+# Verify "list" output
+self.ok("list -l")
+self.match("/" + "/".join(components) + " float32_8\n")
+
+# Lots of renames
+check_path("newton", "prep")
+
+self.fail("rename /newton/prep /newton/prep")
+self.contain("old and new paths are the same")
+check_path("newton", "prep")
+self.fail("rename /newton/prep /newton")
+self.contain("path must contain at least one folder")
+self.fail("rename /newton/prep /newton/prep/")
+self.contain("invalid path")
+self.ok("rename /newton/prep /newton/foo/1")
+check_path("newton", "foo", "1")
+self.ok("rename /newton/foo/1 /newton/foo")
+check_path("newton", "foo")
+self.ok("rename /newton/foo /totally/different/thing")
+check_path("totally", "different", "thing")
+self.ok("rename /totally/different/thing /totally/something")
+check_path("totally", "something")
+self.ok("rename /totally/something /totally/something/cool")
+check_path("totally", "something", "cool")
+self.ok("rename /totally/something/cool /foo/bar")
+check_path("foo", "bar")
+self.ok("create /xxx/yyy/zzz float32_8")
+self.fail("rename /foo/bar /xxx/yyy")
+self.contain("subdirs of this path already exist")
+self.fail("rename /foo/bar /xxx/yyy/zzz")
+self.contain("stream already exists at this path")
+self.fail("rename /foo/bar /xxx/yyy/zzz/www")
+self.contain("path is subdir of existing node")
+self.ok("rename /foo/bar /xxx/yyy/mmm")
+self.ok("destroy -R /xxx/yyy/zzz")
+check_path("xxx", "yyy", "mmm")
+
+# Extract it at the final path
+self.ok("extract /xxx/yyy/mmm --start '2000-01-01' " +
+"--end '2012-03-23 10:04:01'")
+eq_(self.captured, extract_before)
+
+self.ok("destroy -R /xxx/yyy/mmm")
+
+# Make sure temporary rename dirs weren't left around
+for (dirpath, dirnames, filenames) in os.walk(testdb):
+if "rename-" in dirpath:
+raise AssertionError("temporary directories not cleaned up")
+if "totally" in dirpath or "newton" in dirpath:
+raise AssertionError("old directories not cleaned up")
+
+server_stop()
+server_start()
+
+def test_05b_completion(self):
+# Test bash completion. This depends on some data put in the DB by
+# earlier tests, so the execution order is important.
+def complete(line, expect="<unspecified>"):
+# set env vars
+env = {
+'_ARGCOMPLETE': '1',
+'COMP_LINE': line,
+'COMP_POINT': str(len(line)),
+'COMP_TYPE': '8',
+'NILMDB_URL': "http://localhost:32180/",
+}
+for (k, v) in env.items():
+os.environ[k] = v
+
+# create pipe for completion output
+output = io.BytesIO()
+
+# ensure argcomplete won't mess with any FDs
+def fake_fdopen(fd, mode):
+return io.BytesIO()
+old_fdopen = os.fdopen
+os.fdopen = fake_fdopen
+
+# run cli
+cmdline = nilmdb.cmdline.Cmdline([])
+cmdline.complete_output_stream = output
+try:
+cmdline.run()
+sys.exit(0)
+except SystemExit as e:
+exitcode = e.code
+eq_(exitcode, 0)
+
+# clean up
+os.fdopen = old_fdopen
+for (k, v) in env.items():
+del os.environ[k]
+
+# read completion output
+comp = output.getvalue()
+
+# replace completion separators with commas, for clarity
+cleaned = comp.replace(b'\x0b', b',').decode('utf-8')
+
+# expect the given match or prefix
+if expect.endswith('*'):
+if not cleaned.startswith(expect[:-1]):
+raise AssertionError(("completions:\n '%s'\n"
+"don't start with:\n '%s'") %
+(cleaned, expect[:-1]))
+else:
+if cleaned != expect:
+raise AssertionError(("completions:\n '%s'\n"
+"don't match:\n '%s'") %
+(cleaned, expect))
+
+complete("nilmtool -u ", "")
+complete("nilmtool list ", "-h,--help,-E,--ext*")
+complete("nilmtool list --st", "--start ")
+complete("nilmtool list --start ", "")
+complete("nilmtool list /", "/newton/prep,/newton/raw*")
+complete("nilmtool create /foo int3", "int32_1,int32_2*")
+complete("nilmtool metadata /newton/raw --get a",
+"a_𝓴𝓮𝔂,a_key,a_𝗸𝗲𝘆")
+complete("nilmtool metadata /newton/raw --set a",
+"a_𝓴𝓮𝔂=value,a_key=𝓿𝓪𝓵𝓾𝓮,a_𝗸𝗲𝘆=𝘃𝗮𝗹𝘂𝗲")
+complete("nilmtool metadata /newton/raw --set a_𝗸", "a_𝗸𝗲𝘆=𝘃𝗮𝗹𝘂𝗲 ")
+complete("nilmtool metadata '' --set a", "")
+self.run("list")
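test_15 above exercises "intervals --diff", a set difference over half-open [start, end) intervals. A self-contained sketch of the operation on plain (start, end) tuples; it assumes both lists are sorted and internally disjoint, which nilmdb's interval sets guarantee:

    def set_difference(a, b):
        """Subtract the sorted, disjoint interval list b from a."""
        out = []
        for (s, e) in a:
            cur = s
            for (bs, be) in b:
                if be <= cur or bs >= e:
                    continue          # no overlap with this piece
                if bs > cur:
                    out.append((cur, bs))
                cur = max(cur, be)
            if cur < e:
                out.append((cur, e))
        return out

    assert set_difference([(0, 10)], [(2, 4), (6, 8)]) == \
        [(0, 2), (4, 6), (8, 10)]

This matches the test's behavior: inserting matching data into /diff/2 makes the difference empty, and only the uncovered [2001, 2002) and [2004, 2005) ranges survive.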
@@ -2,14 +2,17 @@

import nilmdb
from nilmdb.utils.printf import *
-from nilmdb.utils import datetime_tz
+import datetime_tz

from nose.tools import *
from nose.tools import assert_raises
import itertools

-from nilmdb.server.interval import (Interval, DBInterval,
-IntervalSet, IntervalError)
+from nilmdb.utils.interval import IntervalError
+from nilmdb.server.interval import Interval, DBInterval, IntervalSet
+
+# so we can test them separately
+from nilmdb.utils.interval import Interval as UtilsInterval

from testutil.helpers import *
import unittest
@@ -47,11 +50,29 @@ def makeset(string):
return iset

class TestInterval:
+def test_client_interval(self):
+# Run interval tests against the Python version of Interval.
+global Interval
+NilmdbInterval = Interval
+Interval = UtilsInterval
+self.test_interval()
+self.test_interval_intersect()
+Interval = NilmdbInterval
+
+# Other helpers in nilmdb.utils.interval
+i = [ UtilsInterval(1,2), UtilsInterval(2,3), UtilsInterval(4,5) ]
+eq_(list(nilmdb.utils.interval.optimize(i)),
+[ UtilsInterval(1,3), UtilsInterval(4,5) ])
+eq_(list(nilmdb.utils.interval.optimize([])), [])
+eq_(UtilsInterval(1234567890123456, 1234567890654321).human_string(),
+"[ Fri, 13 Feb 2009 18:31:30.123456 -0500 -> " +
+"Fri, 13 Feb 2009 18:31:30.654321 -0500 ]")
+
def test_interval(self):
# Test Interval class
os.environ['TZ'] = "America/New_York"
datetime_tz._localtz = None
-(d1, d2, d3) = [ datetime_tz.datetime_tz.smartparse(x).totimestamp()
+(d1, d2, d3) = [ nilmdb.utils.time.parse_time(x)
for x in [ "03/24/2012", "03/25/2012", "03/26/2012" ] ]

# basic construction
@@ -73,12 +94,17 @@ class TestInterval:

# compare
assert(Interval(d1, d2) == Interval(d1, d2))
+assert(Interval(d1, d2) <= Interval(d1, d2))
+assert(Interval(d1, d2) >= Interval(d1, d2))
+assert(Interval(d1, d2) != Interval(d1, d3))
assert(Interval(d1, d2) < Interval(d1, d3))
+assert(Interval(d1, d2) <= Interval(d1, d3))
assert(Interval(d1, d3) > Interval(d1, d2))
+assert(Interval(d1, d3) >= Interval(d1, d2))
assert(Interval(d1, d2) < Interval(d2, d3))
assert(Interval(d1, d3) < Interval(d2, d3))
-assert(Interval(d2, d2+0.01) > Interval(d1, d3))
-assert(Interval(d3, d3+0.01) == Interval(d3, d3+0.01))
+assert(Interval(d2, d2+1) > Interval(d1, d3))
+assert(Interval(d3, d3+1) == Interval(d3, d3+1))
#with assert_raises(TypeError): # was AttributeError, that's wrong
# x = (i == 123)

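The added assertions above give Interval a total ordering: comparisons are lexicographic on (start, end), so an interval with a later start sorts after one with an earlier start regardless of length. A toy illustration with functools.total_ordering; nilmdb's real Interval is implemented in an extension module, so this is only a model of the semantics:

    from functools import total_ordering

    @total_ordering
    class Ival:
        def __init__(self, start, end):
            self.start, self.end = start, end
        def __eq__(self, other):
            return (self.start, self.end) == (other.start, other.end)
        def __lt__(self, other):
            return (self.start, self.end) < (other.start, other.end)

    assert Ival(1, 2) <= Ival(1, 2) and Ival(1, 2) >= Ival(1, 2)
    assert Ival(1, 2) != Ival(1, 3) and Ival(1, 2) < Ival(1, 3)
    assert Ival(2, 3) > Ival(1, 9)   # start dominates, as in the test above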
@@ -87,16 +113,16 @@ class TestInterval:
with assert_raises(IntervalError):
x = Interval(d2, d3).subset(d1, d2)

-# big integers and floats
-x = Interval(5000111222, 6000111222)
-eq_(str(x), "[5000111222.000000 -> 6000111222.000000)")
-x = Interval(123.45, 234.56)
-eq_(str(x), "[123.450000 -> 234.560000)")
+# big integers, negative integers
+x = Interval(5000111222000000, 6000111222000000)
+eq_(str(x), "[5000111222000000 -> 6000111222000000)")
+x = Interval(-5000111222000000, -4000111222000000)
+eq_(str(x), "[-5000111222000000 -> -4000111222000000)")

# misc
i = Interval(d1, d2)
eq_(repr(i), repr(eval(repr(i))))
-eq_(str(i), "[1332561600.000000 -> 1332648000.000000)")
+eq_(str(i), "[1332561600000000 -> 1332648000000000)")

def test_interval_intersect(self):
# Test Interval intersections
@@ -193,7 +219,7 @@ class TestInterval:
# misc
eq_(repr(iset), repr(eval(repr(iset))))
eq_(str(iset),
-"[[100.000000 -> 200.000000), [200.000000 -> 300.000000)]")
+"[[100 -> 200), [200 -> 300)]")

def test_intervalset_geniset(self):
# Test basic iset construction
@@ -208,64 +234,109 @@ class TestInterval:
makeset(" [-|-----|"))


-def test_intervalset_intersect(self):
+def test_intervalset_intersect_difference(self):
# Test intersection (&)
with assert_raises(TypeError): # was AttributeError
x = makeset("[--)") & 1234

-# Intersection with interval
-eq_(makeset("[---|---)[)") &
-list(makeset(" [------) "))[0],
-makeset(" [-----) "))
+def do_test(a, b, c, d):
+# a & b == c (using nilmdb.server.interval)
+ab = IntervalSet()
+for x in b:
+for i in (a & x):
+ab += i
+eq_(ab,c)

-# Intersection with sets
-eq_(makeset("[---------)") &
-makeset(" [---) "),
-makeset(" [---) "))
+# a & b == c (using nilmdb.utils.interval)
+eq_(IntervalSet(nilmdb.utils.interval.intersection(a,b)), c)

-eq_(makeset(" [---) ") &
-makeset("[---------)"),
-makeset(" [---) "))
+# a \ b == d
+eq_(IntervalSet(nilmdb.utils.interval.set_difference(a,b)), d)

-eq_(makeset(" [-----)") &
-makeset(" [-----) "),
-makeset(" [--) "))
+# Intersection with intervals
+do_test(makeset("[---|---)[)"),
+makeset(" [------) "),
+makeset(" [-----) "), # intersection
+makeset("[-) [)")) # difference

-eq_(makeset(" [--) [--)") &
-makeset(" [------) "),
-makeset(" [-) [-) "))
+do_test(makeset("[---------)"),
+makeset(" [---) "),
+makeset(" [---) "), # intersection
+makeset("[) [----)")) # difference

-eq_(makeset(" [---)") &
-makeset(" [--) "),
-makeset(" "))
+do_test(makeset(" [---) "),
+makeset("[---------)"),
+makeset(" [---) "), # intersection
+makeset(" ")) # difference

-eq_(makeset(" [-|---)") &
-makeset(" [-----|-) "),
-makeset(" [----) "))
+do_test(makeset(" [-----)"),
+makeset(" [-----) "),
+makeset(" [--) "), # intersection
+makeset(" [--)")) # difference

-eq_(makeset(" [-|-) ") &
-makeset(" [-|--|--) "),
-makeset(" [---) "))
+do_test(makeset(" [--) [--)"),
+makeset(" [------) "),
+makeset(" [-) [-) "), # intersection
+makeset(" [) [)")) # difference
+
+do_test(makeset(" [---)"),
+makeset(" [--) "),
+makeset(" "), # intersection
+makeset(" [---)")) # difference
+
+do_test(makeset(" [-|---)"),
+makeset(" [-----|-) "),
+makeset(" [----) "), # intersection
+makeset(" [)")) # difference
+
+do_test(makeset(" [-|-) "),
+makeset(" [-|--|--) "),
+makeset(" [---) "), # intersection
+makeset(" ")) # difference
+
+do_test(makeset("[-)[-)[-)[)"),
+makeset(" [) [|)[) "),
+makeset(" [) [) "), # intersection
+makeset("[) [-) [)[)")) # difference

# Border cases -- will give different results if intervals are
-# half open or fully closed. Right now, they are half open,
-# although that's a little messy since the database intervals
-# often contain a data point at the endpoint.
-half_open = True
-if half_open:
-eq_(makeset(" [---)") &
+# half open or fully closed. In nilmdb, they are half open.
+do_test(makeset(" [---)"),
makeset(" [----) "),
-makeset(" "))
-eq_(makeset(" [----)[--)") &
+makeset(" "), # intersection
+makeset(" [---)")) # difference
+
+do_test(makeset(" [----)[--)"),
makeset("[-) [--) [)"),
-makeset(" [) [-) [)"))
-else:
-eq_(makeset(" [---)") &
-makeset(" [----) "),
-makeset(" . "))
-eq_(makeset(" [----)[--)") &
-makeset("[-) [--) [)"),
-makeset(" [) [-). [)"))
+makeset(" [) [-) [)"), # intersection
+makeset(" [-) [-) ")) # difference
+
+# Set difference with bounds
+a = makeset(" [----)[--)")
+b = makeset("[-) [--) [)")
+c = makeset("[----) ")
+d = makeset(" [-) ")
+eq_(nilmdb.utils.interval.set_difference(
+a.intersection(list(c)[0]), b.intersection(list(c)[0])), d)
+
+# Fill out test coverage for non-subsets
+def diff2(a,b, subset):
+return nilmdb.utils.interval._interval_math_helper(
+a, b, (lambda a, b: b and not a), subset=subset)
+with assert_raises(nilmdb.utils.interval.IntervalError):
+list(diff2(a,b,True))
+list(diff2(a,b,False))
+
+# Fill out test coverage with a union operator (not implemented
+# in interval.py, because nilmdb doesn't need it)
+def union(a, b):
+return nilmdb.utils.interval._interval_math_helper(
+a, b, (lambda a, b: a or b), subset=False)
+list(union(makeset("[---) "),
+makeset(" [---)")))
+
+# Empty second set
+eq_(nilmdb.utils.interval.set_difference(a, IntervalSet()), a)

class TestIntervalDB:
def test_dbinterval(self):
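The diff2/union helpers above feed boolean predicates into nilmdb.utils.interval._interval_math_helper, which suggests a generic endpoint sweep: split both sets at every endpoint and keep each region where a predicate over (in_a, in_b) holds. A self-contained sketch of that technique on (start, end) tuples; the real helper's internals are not shown in this diff, so this is an inference from its call sites:

    def interval_math(a, b, op):
        points = sorted({p for iv in a + b for p in iv})
        out = []
        for (s, e) in zip(points, points[1:]):
            mid = (s + e) / 2
            in_a = any(s0 <= mid < e0 for (s0, e0) in a)
            in_b = any(s0 <= mid < e0 for (s0, e0) in b)
            if op(in_a, in_b):
                # merge with the previous region when contiguous
                if out and out[-1][1] == s:
                    out[-1] = (out[-1][0], e)
                else:
                    out.append((s, e))
        return out

    # union, as in the test's coverage helper
    assert interval_math([(0, 2)], [(1, 3)], lambda a, b: a or b) == [(0, 3)]
    # difference, as diff2 computes with (lambda a, b: b and not a) reversed
    assert interval_math([(0, 3)], [(1, 2)], lambda a, b: a and not b) == \
        [(0, 1), (2, 3)]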
@@ -319,13 +390,13 @@ class TestIntervalTree:
# make a set of 100 intervals
iset = IntervalSet()
j = 100
-for i in random.sample(xrange(j),j):
+for i in random.sample(range(j),j):
interval = Interval(i, i+1)
iset += interval
render(iset, "Random Insertion")

# remove about half of them
-for i in random.sample(xrange(j),j):
+for i in random.sample(range(j),j):
if random.randint(0,1):
iset -= Interval(i, i+1)

@@ -337,7 +408,7 @@ class TestIntervalTree:
# make a set of 100 intervals, inserted in order
iset = IntervalSet()
j = 100
-for i in xrange(j):
+for i in range(j):
interval = Interval(i, i+1)
iset += interval
render(iset, "In-order insertion")
@@ -347,18 +418,17 @@ class TestIntervalSpeed:
def test_interval_speed(self):
import yappi
import time
-import testutil.aplotter as aplotter
import random
import math

-print
+print()
yappi.start()
speeds = {}
-limit = 10 # was 20
+limit = 22 # was 20
for j in [ 2**x for x in range(5,limit) ]:
start = time.time()
iset = IntervalSet()
-for i in random.sample(xrange(j),j):
+for i in random.sample(range(j),j):
interval = Interval(i, i+1)
iset += interval
speed = (time.time() - start) * 1000000.0
@@ -368,7 +438,7 @@ class TestIntervalSpeed:
speed/j,
speed / (j*math.log(j))) # should be constant
speeds[j] = speed
-aplotter.plot(speeds.keys(), speeds.values(), plot_slope=True)
yappi.stop()
-yappi.print_stats(sort_type=yappi.SORTTYPE_TTOT, limit=10)
+stats = yappi.get_func_stats()
+stats.sort("ttot")
+stats.print_all()
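The hunk above tracks a yappi API change: the old print_stats(sort_type=...) call is replaced by a stats object. Usage as the updated test exercises it (yappi is a third-party profiler):

    import yappi  # third-party profiler

    yappi.start()
    sum(i * i for i in range(100000))  # some work to profile
    yappi.stop()
    stats = yappi.get_func_stats()
    stats.sort("ttot")   # sort by total time, as the updated test does
    stats.print_all()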
@@ -1,61 +0,0 @@
-import nilmdb
-from nilmdb.utils.printf import *
-
-import nose
-from nose.tools import *
-from nose.tools import assert_raises
-import threading
-import time
-
-from testutil.helpers import *
-
-def func_with_callback(a, b, callback):
-callback(a)
-callback(b)
-callback(a+b)
-return "return value"
-
-class TestIteratorizer(object):
-def test(self):
-
-# First try it with a normal callback
-self.result = ""
-def cb(x):
-self.result += str(x)
-func_with_callback(1, 2, cb)
-eq_(self.result, "123")
-
-# Now make it an iterator
-result = ""
-f = lambda x: func_with_callback(1, 2, x)
-with nilmdb.utils.Iteratorizer(f) as it:
-for i in it:
-result += str(i)
-eq_(result, "123")
-eq_(it.retval, "return value")
-
-# Make sure things work when an exception occurs
-result = ""
-with nilmdb.utils.Iteratorizer(
-lambda x: func_with_callback(1, "a", x)) as it:
-with assert_raises(TypeError) as e:
-for i in it:
-result += str(i)
-eq_(result, "1a")
-
-# Now try to trigger the case where we stop iterating
-# mid-generator, and expect the iteratorizer to clean up after
-# itself. This doesn't have a particular result in the test,
-# but gains coverage.
-def foo():
-with nilmdb.utils.Iteratorizer(f) as it:
-it.next()
-foo()
-eq_(it.retval, None)
-
-# Do the same thing when the curl hack is applied
-def foo():
-with nilmdb.utils.Iteratorizer(f, curl_hack = True) as it:
-it.next()
-foo()
-eq_(it.retval, None)
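The deleted file above tested nilmdb.utils.Iteratorizer, dropped in nilmdb-2.0, which adapted a callback-style function into an iterator. A minimal standalone sketch of the idea using a queue and a helper thread; return-value capture and exception propagation, which the real class handled, are omitted here:

    import queue
    import threading

    def iteratorize(func_with_callback):
        """Drive func_with_callback in a thread; yield each callback value."""
        q = queue.Queue()
        done = object()   # sentinel marking the end of the stream

        def run():
            try:
                func_with_callback(q.put)
            finally:
                q.put(done)

        threading.Thread(target=run, daemon=True).start()
        while True:
            item = q.get()
            if item is done:
                return
            yield item

    def emit(cb):
        for x in (1, 2, 3):
            cb(x)

    assert list(iteratorize(emit)) == [1, 2, 3]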
@@ -1,266 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import nilmdb
-
-from nilmdb.utils.printf import *
-
-from nose.tools import *
-from nose.tools import assert_raises
-import distutils.version
-import itertools
-import os
-import sys
-import random
-import unittest
-
-from testutil.helpers import *
-
-from nilmdb.server.layout import *
-
-class TestLayouts(object):
-# Some nilmdb.layout tests. Not complete, just fills in missing
-# coverage.
-def test_layouts(self):
-x = nilmdb.server.layout.get_named("float32_8")
-y = nilmdb.server.layout.get_named("float32_8")
-eq_(x.count, y.count)
-eq_(x.datatype, y.datatype)
-y = nilmdb.server.layout.get_named("float32_7")
-ne_(x.count, y.count)
-eq_(x.datatype, y.datatype)
-
-def test_parsing(self):
-self.real_t_parsing("float32_8", "uint16_6", "uint16_9")
-self.real_t_parsing("float32_8", "uint16_6", "uint16_9")
-def real_t_parsing(self, name_prep, name_raw, name_rawnotch):
-# invalid layouts
-with assert_raises(TypeError) as e:
-parser = Parser("NoSuchLayout")
-with assert_raises(TypeError) as e:
-parser = Parser("float32")
-
-# too little data
-parser = Parser(name_prep)
-data = ( "1234567890.000000 1.1 2.2 3.3 4.4 5.5\n" +
-"1234567890.100000 1.1 2.2 3.3 4.4 5.5\n")
-with assert_raises(ParserError) as e:
-parser.parse(data)
-in_("error", str(e.exception))
-
-# too much data
-parser = Parser(name_prep)
-data = ( "1234567890.000000 1.1 2.2 3.3 4.4 5.5 6.6 7.7 8.8 9.9\n" +
-"1234567890.100000 1.1 2.2 3.3 4.4 5.5 6.6 7.7 8.8 9.9\n")
-with assert_raises(ParserError) as e:
-parser.parse(data)
-in_("error", str(e.exception))
-
-# just right
-parser = Parser(name_prep)
-data = ( "1234567890.000000 1.1 2.2 3.3 4.4 5.5 6.6 7.7 8.8\n" +
-"1234567890.100000 1.1 2.2 3.3 4.4 5.5 6.6 7.7 8.8\n")
-parser.parse(data)
-eq_(parser.min_timestamp, 1234567890.0)
-eq_(parser.max_timestamp, 1234567890.1)
-eq_(parser.data, [[1234567890.0,1.1,2.2,3.3,4.4,5.5,6.6,7.7,8.8],
-[1234567890.1,1.1,2.2,3.3,4.4,5.5,6.6,7.7,8.8]])
-
-# try uint16_6 too, with clamping
-parser = Parser(name_raw)
-data = ( "1234567890.000000 1 2 3 4 5 6\n" +
-"1234567890.100000 1 2 3 4 5 6\n" )
-parser.parse(data)
-eq_(parser.data, [[1234567890.0,1,2,3,4,5,6],
-[1234567890.1,1,2,3,4,5,6]])
-
-# pass an instantiated class
-parser = Parser(get_named(name_rawnotch))
-data = ( "1234567890.000000 1 2 3 4 5 6 7 8 9\n" +
-"1234567890.100000 1 2 3 4 5 6 7 8 9\n" )
-parser.parse(data)
-
-# non-monotonic
-parser = Parser(name_raw)
-data = ( "1234567890.100000 1 2 3 4 5 6\n" +
-"1234567890.099999 1 2 3 4 5 6\n" )
-with assert_raises(ParserError) as e:
-parser.parse(data)
-in_("not monotonically increasing", str(e.exception))
-
-parser = Parser(name_raw)
-data = ( "1234567890.100000 1 2 3 4 5 6\n" +
-"1234567890.100000 1 2 3 4 5 6\n" )
-with assert_raises(ParserError) as e:
-parser.parse(data)
-in_("not monotonically increasing", str(e.exception))
-
-parser = Parser(name_raw)
-data = ( "1234567890.100000 1 2 3 4 5 6\n" +
-"1234567890.100001 1 2 3 4 5 6\n" )
-parser.parse(data)
-
-# uint16_6 with values out of bounds
-parser = Parser(name_raw)
-data = ( "1234567890.000000 1 2 3 4 500000 6\n" +
-"1234567890.100000 1 2 3 4 5 6\n" )
-with assert_raises(ParserError) as e:
-parser.parse(data)
-in_("value out of range", str(e.exception))
-
-# Empty data should work but is useless
-parser = Parser(name_raw)
-data = ""
-parser.parse(data)
-assert(parser.min_timestamp is None)
-assert(parser.max_timestamp is None)
-
-def test_formatting(self):
-self.real_t_formatting("float32_8", "uint16_6", "uint16_9")
-self.real_t_formatting("float32_8", "uint16_6", "uint16_9")
-def real_t_formatting(self, name_prep, name_raw, name_rawnotch):
-# invalid layout
-with assert_raises(TypeError) as e:
-formatter = Formatter("NoSuchLayout")
-
-# too little data
-formatter = Formatter(name_prep)
-data = [ [ 1234567890.000000, 1.1, 2.2, 3.3, 4.4, 5.5 ],
-[ 1234567890.100000, 1.1, 2.2, 3.3, 4.4, 5.5 ] ]
-with assert_raises(FormatterError) as e:
-formatter.format(data)
-in_("error", str(e.exception))
-
-# too much data
-formatter = Formatter(name_prep)
-data = [ [ 1234567890.000000, 1, 2, 3, 4, 5, 6, 7, 8, 9 ],
-[ 1234567890.100000, 1, 2, 3, 4, 5, 6, 7, 8, 9 ] ]
-with assert_raises(FormatterError) as e:
-formatter.format(data)
-in_("error", str(e.exception))
-
-# just right
-formatter = Formatter(name_prep)
-data = [ [ 1234567890.000000, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8 ],
-[ 1234567890.100000, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8 ] ]
-text = formatter.format(data)
-eq_(text,
-"1234567890.000000 1.100000e+00 2.200000e+00 3.300000e+00 "
-"4.400000e+00 5.500000e+00 6.600000e+00 7.700000e+00 "
-"8.800000e+00\n" +
-"1234567890.100000 1.100000e+00 2.200000e+00 3.300000e+00 "
-"4.400000e+00 5.500000e+00 6.600000e+00 7.700000e+00 "
-"8.800000e+00\n")
-
-# try uint16_6 too
-formatter = Formatter(name_raw)
-data = [ [ 1234567890.000000, 1, 2, 3, 4, 5, 6 ],
-[ 1234567890.100000, 1, 2, 3, 4, 5, 6 ] ]
-text = formatter.format(data)
-eq_(text,
-"1234567890.000000 1 2 3 4 5 6\n" +
-"1234567890.100000 1 2 3 4 5 6\n")
-
-# pass an instantiated class
-formatter = Formatter(get_named(name_rawnotch))
-data = [ [ 1234567890.000000, 1, 2, 3, 4, 5, 6, 7, 8, 9 ],
-[ 1234567890.100000, 1, 2, 3, 4, 5, 6, 7, 8, 9 ] ]
-text = formatter.format(data)
-eq_(text,
-"1234567890.000000 1 2 3 4 5 6 7 8 9\n" +
-"1234567890.100000 1 2 3 4 5 6 7 8 9\n")
-
-# Empty data should work but is useless
-formatter = Formatter(name_raw)
-data = []
-text = formatter.format(data)
-eq_(text, "")
-
-def test_roundtrip(self):
-self.real_t_roundtrip("float32_8", "uint16_6", "uint16_9")
-self.real_t_roundtrip("float32_8", "uint16_6", "uint16_9")
-def real_t_roundtrip(self, name_prep, name_raw, name_rawnotch):
-# Verify that textual data passed into the Parser, and then
-# back through the Formatter, then back into the Parser,
-# gives identical parsed representations
-random.seed(12345)
-
-def do_roundtrip(layout, datagen):
-for i in range(100):
-rows = random.randint(1,100)
-data = ""
-ts = 1234567890
-for r in range(rows):
-ts += random.uniform(0,1)
-row = sprintf("%f", ts) + " "
-row += " ".join(datagen())
-row += "\n"
-data += row
-parser1 = Parser(layout)
-formatter = Formatter(layout)
-parser2 = Parser(layout)
-parser1.parse(data)
-parser2.parse(formatter.format(parser1.data))
-eq_(parser1.data, parser2.data)
-
-def datagen():
-return [ sprintf("%.6e", random.uniform(-1000,1000))
-for x in range(8) ]
-do_roundtrip(name_prep, datagen)
-
-def datagen():
-return [ sprintf("%d", random.randint(0,65535))
-for x in range(6) ]
-do_roundtrip(name_raw, datagen)
-
-def datagen():
-return [ sprintf("%d", random.randint(0,65535))
-for x in range(9) ]
-do_roundtrip(name_rawnotch, datagen)
-
-class TestLayoutSpeed:
-@unittest.skip("this is slow")
-def test_layout_speed(self):
-import time
-
-random.seed(54321)
-
-def do_speedtest(layout, datagen, rows = 5000, times = 100):
-# Build data once
-data = ""
-ts = 1234567890
-for r in range(rows):
-ts += random.uniform(0,1)
-row = sprintf("%f", ts) + " "
-row += " ".join(datagen())
-row += "\n"
-data += row
-
-# Do lots of roundtrips
-start = time.time()
-for i in range(times):
-parser = Parser(layout)
-formatter = Formatter(layout)
-parser.parse(data)
-formatter.format(parser.data)
-elapsed = time.time() - start
-printf("roundtrip %s: %d ms, %.1f μs/row, %d rows/sec\n",
-layout,
-elapsed * 1e3,
-(elapsed * 1e6) / (rows * times),
-(rows * times) / elapsed)
-
-print ""
-def datagen():
-return [ sprintf("%.6e", random.uniform(-1000,1000))
-for x in range(10) ]
-do_speedtest("float32_10", datagen)
-
-def datagen():
-return [ sprintf("%d", random.randint(0,65535))
-for x in range(10) ]
-do_speedtest("uint16_10", datagen)
-
-def datagen():
-return [ sprintf("%d", random.randint(0,65535))
-for x in range(6) ]
-do_speedtest("uint16_6", datagen)
tests/test_misc.py (new file, 139 lines)
@@ -0,0 +1,139 @@
+from nose.tools import *
+from nose.tools import assert_raises
+from testutil.helpers import *
+
+import io
+import os
+import sys
+import time
+import socket
+import cherrypy
+
+import nilmdb.server
+from nilmdb.utils import timer, lock
+
+class TestMisc(object):
+def test_timer(self):
+capture = io.StringIO()
+old = sys.stdout
+sys.stdout = capture
+with nilmdb.utils.Timer("test"):
+time.sleep(0.01)
+with nilmdb.utils.Timer("test syslog", tosyslog=True):
+time.sleep(0.01)
+sys.stdout = old
+in_("test: ", capture.getvalue())
+
+def test_lock(self):
+with open("/dev/null") as f:
+eq_(nilmdb.utils.lock.exclusive_lock(f), True)
+nilmdb.utils.lock.exclusive_unlock(f)
+# Test error conditions
+class FakeFile():
+def __init__(self, fileno):
+self._fileno = fileno
+def fileno(self):
+return self._fileno
+with assert_raises(TypeError):
+nilmdb.utils.lock.exclusive_lock(FakeFile('none'))
+with assert_raises(ValueError):
+nilmdb.utils.lock.exclusive_lock(FakeFile(-1))
+with assert_raises(IOError):
+nilmdb.utils.lock.exclusive_lock(FakeFile(12345))
+
+# Lock failure is tested in test_bulkdata
+
+def test_replace_file(self):
+fn = b"tests/misc-testdb/file"
+try:
+os.mkdir(os.path.dirname(fn))
+except FileExistsError:
+pass
+with open(fn, "wb") as f:
+f.write(b"hello, world")
+nilmdb.utils.atomic.replace_file(fn, b"goodbye, world")
+with open(fn, "rb") as f:
+eq_(f.read(), b"goodbye, world")
+
+def test_punch(self):
+fn = b"tests/misc-testdb/punchit"
+try:
+os.mkdir(os.path.dirname(fn))
+except FileExistsError:
+pass
+with open(fn, "wb") as f:
+f.write(b"hello, world")
+nilmdb.utils.fallocate.punch_hole(fn, 3, 5)
+with open(fn, "rb") as f:
+eq_(f.read(), b"hel\0\0\0\0\0orld")
+with assert_raises(OSError):
+nilmdb.utils.fallocate.punch_hole(fn, 1, -1, False)
+with assert_raises(OSError):
+nilmdb.utils.fallocate.punch_hole("/", 1, 1, False)
+# no exception because we ignore errors by default
+nilmdb.utils.fallocate.punch_hole(fn, 1, -1)
+
|
def test_diskusage(self):
|
||||||
|
hs = nilmdb.utils.diskusage.human_size
|
||||||
|
eq_(hs(0), "0 bytes")
|
||||||
|
eq_(hs(1), "1 byte")
|
||||||
|
eq_(hs(1023), "1023 bytes")
|
||||||
|
|
||||||
|
eq_(hs(1024), "1 kiB")
|
||||||
|
|
||||||
|
eq_(hs(1048575), "1024 kiB")
|
||||||
|
eq_(hs(1048576), "1.0 MiB")
|
||||||
|
|
||||||
|
eq_(hs(1073741823), "1024.0 MiB")
|
||||||
|
eq_(hs(1073741824), "1.00 GiB")
|
||||||
|
|
||||||
|
eq_(hs(1099511627775), "1024.00 GiB")
|
||||||
|
eq_(hs(1099511627776), "1.00 TiB")
|
||||||
|
|
||||||
|
eq_(hs(1099511627776 * 5000.1234), "5000.12 TiB")
|
||||||
|
|
||||||
|
nilmdb.utils.diskusage.du("/dev")
|
||||||
|
with assert_raises(OSError):
|
||||||
|
nilmdb.utils.diskusage.du("/dev/null/bogus")
|
||||||
|
nilmdb.utils.diskusage.du("super-bogus-does-not-exist")
|
||||||
|
|
||||||
|
def test_cors_allow(self):
|
||||||
|
# Just to get some test coverage; these code paths aren't actually
|
||||||
|
# used in current code
|
||||||
|
cpy = nilmdb.server.serverutil.cherrypy
|
||||||
|
(req, resp) = (cpy.request, cpy.response)
|
||||||
|
cpy.request.method = "DELETE"
|
||||||
|
with assert_raises(cpy.HTTPError):
|
||||||
|
nilmdb.server.serverutil.CORS_allow(methods="POST")
|
||||||
|
with assert_raises(cpy.HTTPError):
|
||||||
|
nilmdb.server.serverutil.CORS_allow(methods=["POST"])
|
||||||
|
with assert_raises(cpy.HTTPError):
|
||||||
|
nilmdb.server.serverutil.CORS_allow(methods=["GET"])
|
||||||
|
with assert_raises(cpy.HTTPError):
|
||||||
|
nilmdb.server.serverutil.CORS_allow(methods=[])
|
||||||
|
(cpy.request, cpy.response) = (req, resp)
|
||||||
|
|
||||||
|
def test_cherrypy_failure(self):
|
||||||
|
# Test failure of cherrypy to start up because the port is
|
||||||
|
# already in use. This also tests the functionality of
|
||||||
|
# serverutil:cherrypy_patch_exit()
|
||||||
|
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
|
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
try:
|
||||||
|
sock.bind(("127.0.0.1", 32180))
|
||||||
|
sock.listen(1)
|
||||||
|
except OSError:
|
||||||
|
raise AssertionError("port 32180 must be free for tests")
|
||||||
|
|
||||||
|
nilmdb.server.serverutil.cherrypy_patch_exit()
|
||||||
|
cherrypy.config.update({
|
||||||
|
'environment': 'embedded',
|
||||||
|
'server.socket_host': '127.0.0.1',
|
||||||
|
'server.socket_port': 32180,
|
||||||
|
'engine.autoreload.on': False,
|
||||||
|
})
|
||||||
|
with assert_raises(Exception) as e:
|
||||||
|
cherrypy.engine.start()
|
||||||
|
in_("Address already in use", str(e.exception))
|
||||||
|
|
||||||
|
sock.close()
|
||||||
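As a reading aid for test_diskusage above: the asserted strings pin the formatting rules down almost completely. The following is a minimal sketch that satisfies every assertion in the test; human_size_sketch is a hypothetical name, and nilmdb.utils.diskusage.human_size may be implemented differently.

# Minimal sketch matching the test_diskusage assertions above; assumes
# binary (1024-based) units with one more digit of precision per step.
# Not nilmdb's actual implementation.
def human_size_sketch(n):
    if n < 1024:
        return "1 byte" if n == 1 else "%d bytes" % n
    if n < 1024**2:
        return "%.0f kiB" % (n / 1024)   # "%.0f" rounds, so 1048575 -> "1024 kiB"
    if n < 1024**3:
        return "%.1f MiB" % (n / 1024**2)
    if n < 1024**4:
        return "%.2f GiB" % (n / 1024**3)
    return "%.2f TiB" % (n / 1024**4)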
@@ -8,12 +8,12 @@ from nose.tools import assert_raises
 from testutil.helpers import *

 import sys
-import cStringIO
+import io
 import gc

 import inspect

-err = cStringIO.StringIO()
+err = io.StringIO()

 @nilmdb.utils.must_close(errorfile = err)
 class Foo:
@@ -71,6 +71,7 @@ class TestMustClose(object):

         # No error
         err.truncate(0)
+        err.seek(0)
         y = Foo("bye")
         y.close()
         del y
@@ -82,6 +83,7 @@ class TestMustClose(object):

         # Verify function calls when wrap_verify is True
         err.truncate(0)
+        err.seek(0)
         z = Bar()
         eq_(inspect.getargspec(z.blah),
             inspect.ArgSpec(args = ['self', 'arg'],
@@ -90,7 +92,7 @@ class TestMustClose(object):
         z.close()
         with assert_raises(AssertionError) as e:
             z.blah("hello")
-        in_("called <function blah at 0x", str(e.exception))
+        in_("called <function Bar.blah at 0x", str(e.exception))
         in_("> after close", str(e.exception))
         # Since the most recent assertion references 'z',
         # we need to raise another assertion here so that
@@ -107,8 +109,13 @@ class TestMustClose(object):

         # Class with missing methods
         err.truncate(0)
+        err.seek(0)
         w = Baz()
         w.close()
         del w
         eq_(err.getvalue(), "")

+        # Test errors during __del__, by closing stderr so the fprintf fails
+        r = Foo("hi")
+        err.close()
+        del r
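The err.seek(0) lines added above are not cosmetic: with io.StringIO (unlike the cStringIO usage being replaced), truncate() does not move the stream position, and in CPython a subsequent write past the end pads the gap with NUL characters. A short demonstration of why both calls are needed:

import io

buf = io.StringIO()
buf.write("hello")
buf.truncate(0)      # discards the contents, but the position stays at 5
buf.write("!")       # CPython pads the gap with "\0" characters
assert buf.getvalue() == "\0\0\0\0\0!"

buf.truncate(0)
buf.seek(0)          # the added seek(0) resets the position too
buf.write("!")
assert buf.getvalue() == "!"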
@@ -3,18 +3,23 @@ import nilmdb.server
 from nose.tools import *
 from nose.tools import assert_raises
 import distutils.version
-import simplejson as json
+import json
 import itertools
 import os
 import sys
 import threading
-import urllib2
-from urllib2 import urlopen, HTTPError
-import cStringIO
+import urllib.request, urllib.error, urllib.parse
+from urllib.request import urlopen
+from urllib.error import HTTPError
+import io
 import time
 import requests
+import socket
+import sqlite3
+import cherrypy

 from nilmdb.utils import serializer_proxy
+from nilmdb.server.interval import Interval

 testdb = "tests/testdb"

@@ -24,26 +29,72 @@ testdb = "tests/testdb"

 from testutil.helpers import *

+def setup_module():
+    # Make sure port is free
+    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    try:
+        sock.bind(("127.0.0.1", 32180))
+    except OSError:
+        raise AssertionError("port 32180 must be free for tests")
+    sock.close()
+
 class Test00Nilmdb(object):  # named 00 so it runs first
     def test_NilmDB(self):
         recursive_unlink(testdb)

+        with assert_raises(IOError):
+            nilmdb.server.NilmDB("/nonexistant-db/foo")
+
         db = nilmdb.server.NilmDB(testdb)
         db.close()
         db = nilmdb.server.NilmDB(testdb)
         db.close()
+        db.close()

-        # test timer, just to get coverage
-        capture = cStringIO.StringIO()
-        old = sys.stdout
-        sys.stdout = capture
-        with nilmdb.utils.Timer("test"):
-            time.sleep(0.01)
-        sys.stdout = old
-        in_("test: ", capture.getvalue())
+    def test_error_cases(self):
+        # Test some misc error cases to get better code coverage
+
+        with assert_raises(OSError) as e:
+            nilmdb.server.NilmDB("/dev/null/bogus")
+        in_("can't create tree", str(e.exception))
+
+        # Version upgrades
+        con = sqlite3.connect(os.path.join(testdb, "data.sql"))
+        con.execute("PRAGMA user_version = 2");
+        con.close()
+        with assert_raises(Exception) as e:
+            db = nilmdb.server.NilmDB(testdb)
+        in_("can't use database version 2", str(e.exception))
+
+        con = sqlite3.connect(os.path.join(testdb, "data.sql"))
+        con.execute("PRAGMA user_version = -1234");
+        con.close()
+        with assert_raises(Exception) as e:
+            db = nilmdb.server.NilmDB(testdb)
+        in_("unknown database version -1234", str(e.exception))
+
+        recursive_unlink(testdb)
+
+        nilmdb.server.NilmDB.verbose = 1
+        (old, sys.stdout) = (sys.stdout, io.StringIO())
+        db = nilmdb.server.NilmDB(testdb)
+        (output, sys.stdout) = (sys.stdout.getvalue(), old)
+        nilmdb.server.NilmDB.verbose = 0
+        db.close()
+        in_("Database schema updated to 1", output)
+
+        # Corrupted database (bad ranges)
+        recursive_unlink(testdb)
+        db = nilmdb.server.NilmDB(testdb)
+        db.con.executescript("""
+            INSERT INTO streams VALUES (1, "/test", "int32_1");
+            INSERT INTO ranges VALUES (1, 100, 200, 100, 200);
+            INSERT INTO ranges VALUES (1, 150, 250, 150, 250);
+        """)
+        db.close()
+        db = nilmdb.server.NilmDB(testdb)
+        with assert_raises(nilmdb.server.NilmDBError):
+            db.stream_intervals("/test")
+        db.close()
+        recursive_unlink(testdb)
+
     def test_stream(self):
         db = nilmdb.server.NilmDB(testdb)
@@ -70,15 +121,6 @@ class Test00Nilmdb(object):  # named 00 so it runs first
         eq_(db.stream_list(layout="uint16_6"), [ ["/newton/raw", "uint16_6"] ])
         eq_(db.stream_list(path="/newton/raw"), [ ["/newton/raw", "uint16_6"] ])

-        # Verify that columns were made right (pytables specific)
-        if "h5file" in db.data.__dict__:
-            h5file = db.data.h5file
-            eq_(len(h5file.getNode("/newton/prep").cols), 9)
-            eq_(len(h5file.getNode("/newton/raw").cols), 7)
-            eq_(len(h5file.getNode("/newton/zzz/rawnotch").cols), 10)
-            assert(not h5file.getNode("/newton/prep").colindexed["timestamp"])
-            assert(not h5file.getNode("/newton/prep").colindexed["c1"])
-
         # Set / get metadata
         eq_(db.stream_get_metadata("/newton/prep"), {})
         eq_(db.stream_get_metadata("/newton/raw"), {})
@@ -93,13 +135,16 @@ class Test00Nilmdb(object):  # named 00 so it runs first
         eq_(db.stream_get_metadata("/newton/prep"), meta1)
         eq_(db.stream_get_metadata("/newton/raw"), meta1)

-        # fill in some test coverage for start >= end
+        # fill in some misc. test coverage
         with assert_raises(nilmdb.server.NilmDBError):
             db.stream_remove("/newton/prep", 0, 0)
         with assert_raises(nilmdb.server.NilmDBError):
             db.stream_remove("/newton/prep", 1, 0)
         db.stream_remove("/newton/prep", 0, 1)

+        with assert_raises(nilmdb.server.NilmDBError):
+            db.stream_extract("/newton/prep", count = True, binary = True)
+
         db.close()

 class TestBlockingServer(object):
@@ -119,24 +164,38 @@ class TestBlockingServer(object):
         self.server = nilmdb.server.Server(self.db, host = "127.0.0.1",
                                            port = 32180, stoppable = True)

-        # Run it
-        event = threading.Event()
-        def run_server():
-            self.server.start(blocking = True, event = event)
-        thread = threading.Thread(target = run_server)
-        thread.start()
-        if not event.wait(timeout = 10):
-            raise AssertionError("server didn't start in 10 seconds")
+        def start_server():
+            # Run it
+            event = threading.Event()
+            def run_server():
+                self.server.start(blocking = True, event = event)
+            thread = threading.Thread(target = run_server)
+            thread.start()
+            if not event.wait(timeout = 10):
+                raise AssertionError("server didn't start in 10 seconds")
+            return thread

-        # Send request to exit.
+        # Start server and request for it to exit
+        thread = start_server()
         req = urlopen("http://127.0.0.1:32180/exit/", timeout = 1)

-        # Wait for it
         thread.join()

+        # Mock some signals that should kill the server
+        def try_signal(sig):
+            old = cherrypy.engine.wait
+            def raise_sig(*args, **kwargs):
+                raise sig()
+            cherrypy.engine.wait = raise_sig
+            thread = start_server()
+            thread.join()
+            cherrypy.engine.wait = old
+        try_signal(SystemExit)
+        try_signal(KeyboardInterrupt)
+
 def geturl(path):
-    req = urlopen("http://127.0.0.1:32180" + path, timeout = 10)
-    return req.read()
+    resp = urlopen("http://127.0.0.1:32180" + path, timeout = 10)
+    body = resp.read()
+    return body.decode(resp.headers.get_content_charset() or 'utf-8')

 def getjson(path):
     return json.loads(geturl(path))
@@ -157,11 +216,14 @@ class TestServer(object):

     def test_server(self):
         # Make sure we can't force an exit, and test other 404 errors
-        for url in [ "/exit", "/", "/favicon.ico" ]:
+        for url in [ "/exit", "/favicon.ico" ]:
             with assert_raises(HTTPError) as e:
                 geturl(url)
             eq_(e.exception.code, 404)

+        # Root page
+        in_("This is NilmDB", geturl("/"))
+
         # Check version
         eq_(distutils.version.LooseVersion(getjson("/version")),
             distutils.version.LooseVersion(nilmdb.__version__))
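The version-upgrade checks above lean on a standard SQLite feature: PRAGMA user_version stores an application-defined integer in the database header, which is a common place to keep a schema version. A minimal standalone illustration (the error messages in the test are nilmdb's own; the PRAGMA itself is plain sqlite3):

# Setting and reading the schema-version slot the test manipulates.
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("PRAGMA user_version = 2")
version = con.execute("PRAGMA user_version").fetchone()[0]
assert version == 2
con.close()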
tests/test_numpyclient.py (new normal file, 376 lines)
@@ -0,0 +1,376 @@
# -*- coding: utf-8 -*-

import nilmdb.server
import nilmdb.client
import nilmdb.client.numpyclient

from nilmdb.utils.printf import *
from nilmdb.utils import timestamper
from nilmdb.client import ClientError, ServerError
import datetime_tz

from nose.plugins.skip import SkipTest
from nose.tools import *
from nose.tools import assert_raises
import itertools
import distutils.version

from testutil.helpers import *

import numpy as np

testdb = "tests/numpyclient-testdb"
testurl = "http://localhost:32180/"

def setup_module():
    global test_server, test_db
    # Clear out DB
    recursive_unlink(testdb)

    # Start web app on a custom port
    test_db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(
        testdb, bulkdata_args = { "file_size" : 16384,
                                  "files_per_dir" : 3 } )

    test_server = nilmdb.server.Server(test_db, host = "127.0.0.1",
                                       port = 32180, stoppable = False,
                                       fast_shutdown = True,
                                       force_traceback = True)
    test_server.start(blocking = False)

def teardown_module():
    global test_server, test_db
    # Close web app
    test_server.stop()
    test_db.close()

class TestNumpyClient(object):

    def test_numpyclient_01_basic(self):
        # Test basic connection
        client = nilmdb.client.numpyclient.NumpyClient(url = testurl)
        version = client.version()
        eq_(distutils.version.LooseVersion(version),
            distutils.version.LooseVersion(test_server.version))

        # Verify subclassing
        assert(isinstance(client, nilmdb.client.Client))

        # Layouts
        for layout in "int8_t", "something_8", "integer_1":
            with assert_raises(ValueError):
                for x in client.stream_extract_numpy("/foo", layout=layout):
                    pass
        for layout in "int8_1", "uint8_30", "int16_20", "float64_100":
            with assert_raises(ClientError) as e:
                for x in client.stream_extract_numpy("/foo", layout=layout):
                    pass
            in_("No such stream", str(e.exception))

        with assert_raises(ClientError) as e:
            for x in client.stream_extract_numpy("/foo"):
                pass
        in_("can't get layout for path", str(e.exception))

        client.close()

    def test_numpyclient_02_extract(self):
        client = nilmdb.client.numpyclient.NumpyClient(url = testurl)

        # Insert some data as text
        client.stream_create("/newton/prep", "float32_8")
        testfile = "tests/data/prep-20120323T1000"
        start = nilmdb.utils.time.parse_time("20120323T1000")
        rate = 120
        data = timestamper.TimestamperRate(testfile, start, rate)
        result = client.stream_insert("/newton/prep", data,
                                      start, start + 119999777)

        # Extract Numpy arrays
        array = None
        pieces = 0
        for chunk in client.stream_extract_numpy("/newton/prep", maxrows=1000):
            pieces += 1
            if array is not None:
                array = np.vstack((array, chunk))
            else:
                array = chunk
        eq_(array.shape, (14400, 9))
        eq_(pieces, 15)

        # Try structured
        s = list(client.stream_extract_numpy("/newton/prep", structured = True))
        assert(np.array_equal(np.c_[s[0]['timestamp'], s[0]['data']], array))

        # Compare.  Will be close but not exact because the conversion
        # to and from ASCII was lossy.
        data = timestamper.TimestamperRate(testfile, start, rate)
        data_str = b" ".join(data).decode('utf-8', errors='backslashreplace')
        actual = np.fromstring(data_str, sep=' ').reshape(14400, 9)
        assert(np.allclose(array, actual))

        client.close()

    def test_numpyclient_03_insert(self):
        client = nilmdb.client.numpyclient.NumpyClient(url = testurl)

        # Limit _max_data just to get better coverage
        old_max_data = nilmdb.client.numpyclient.StreamInserterNumpy._max_data
        nilmdb.client.numpyclient.StreamInserterNumpy._max_data = 100000

        client.stream_create("/test/1", "uint16_1")
        client.stream_insert_numpy("/test/1",
                                   np.array([[0, 1],
                                             [1, 2],
                                             [2, 3],
                                             [3, 4]]))

        # Wrong number of dimensions
        with assert_raises(ValueError) as e:
            client.stream_insert_numpy("/test/1",
                                       np.array([[[0, 1],
                                                  [1, 2]],
                                                 [[3, 4],
                                                  [4, 5]]]))
        in_("wrong number of dimensions", str(e.exception))

        # Wrong number of fields
        with assert_raises(ValueError) as e:
            client.stream_insert_numpy("/test/1",
                                       np.array([[0, 1, 2],
                                                 [1, 2, 3],
                                                 [3, 4, 5],
                                                 [4, 5, 6]]))
        in_("wrong number of fields", str(e.exception))

        # Unstructured
        client.stream_create("/test/2", "float32_8")
        client.stream_insert_numpy(
            "/test/2",
            client.stream_extract_numpy(
                "/newton/prep", structured = False, maxrows = 1000))

        # Structured, and specifying layout.
        # This also tests the final branch in stream_extract_numpy by specifying
        # a value of maxrows that exactly matches how much data we had inserted.
        client.stream_create("/test/3", "float32_8")
        client.stream_insert_numpy(
            path = "/test/3", layout = "float32_8",
            data = client.stream_extract_numpy(
                "/newton/prep", structured = True, maxrows = 14400))

        # Structured, specifying wrong layout
        client.stream_create("/test/4", "float32_8")
        with assert_raises(ValueError) as e:
            client.stream_insert_numpy(
                "/test/4", layout = "uint16_1",
                data = client.stream_extract_numpy(
                    "/newton/prep", structured = True, maxrows = 1000))
        in_("wrong dtype", str(e.exception))

        # Unstructured, and specifying wrong layout
        client.stream_create("/test/5", "float32_8")
        with assert_raises(ClientError) as e:
            client.stream_insert_numpy(
                "/test/5", layout = "uint16_8",
                data = client.stream_extract_numpy(
                    "/newton/prep", structured = False, maxrows = 1000))
        # timestamps will be screwy here, because data will be parsed wrong
        in_("error parsing input data", str(e.exception))

        # Make sure the /newton/prep copies are identical
        a = np.vstack(list(client.stream_extract_numpy("/newton/prep")))
        b = np.vstack(list(client.stream_extract_numpy("/test/2")))
        c = np.vstack(list(client.stream_extract_numpy("/test/3")))
        assert(np.array_equal(a,b))
        assert(np.array_equal(a,c))

        # Make sure none of the files are greater than 16384 bytes as
        # we configured with the bulkdata_args above.
        datapath = os.path.join(testdb, "data")
        for (dirpath, dirnames, filenames) in os.walk(datapath):
            for f in filenames:
                fn = os.path.join(dirpath, f)
                size = os.path.getsize(fn)
                if size > 16384:
                    raise AssertionError(sprintf("%s is too big: %d > %d\n",
                                                 fn, size, 16384))

        nilmdb.client.numpyclient.StreamInserterNumpy._max_data = old_max_data
        client.close()

    def test_numpyclient_04_context(self):
        # Like test_client_context, but with Numpy data
        client = nilmdb.client.numpyclient.NumpyClient(testurl)

        client.stream_create("/context/test", "uint16_1")
        with client.stream_insert_numpy_context("/context/test") as ctx:
            # override _max_rows to trigger frequent server updates
            ctx._max_rows = 2
            ctx.insert([[1000, 1]])
            ctx.insert([[1010, 1], [1020, 1], [1030, 1]])
            ctx.insert([[1040, 1], [1050, 1]])
            ctx.finalize()
            ctx.insert([[1070, 1]])
            ctx.update_end(1080)
            ctx.finalize()
            ctx.update_start(1090)
            ctx.insert([[1100, 1]])
            ctx.insert([[1110, 1]])
            ctx.send()
            ctx.insert([[1120, 1], [1130, 1], [1140, 1]])
            ctx.update_end(1160)
            ctx.insert([[1150, 1]])
            ctx.update_end(1170)
            ctx.insert([[1160, 1]])
            ctx.update_end(1180)
            ctx.insert([[1170, 123456789.0]])
            ctx.finalize()
            ctx.insert(np.zeros((0,2)))

        with assert_raises(ClientError):
            with client.stream_insert_numpy_context("/context/test",
                                                    1000, 2000) as ctx:
                ctx.insert([[1180, 1]])

        with assert_raises(ClientError):
            with client.stream_insert_numpy_context("/context/test",
                                                    2000, 3000) as ctx:
                ctx._max_rows = 2
                ctx.insert([[3180, 1]])
                ctx.insert([[3181, 1]])

        with client.stream_insert_numpy_context("/context/test",
                                                2000, 3000) as ctx:
            # make sure our override wasn't permanent
            ne_(ctx._max_rows, 2)
            ctx.insert([[2250, 1]])
            ctx.finalize()

        with assert_raises(ClientError):
            with client.stream_insert_numpy_context("/context/test",
                                                    3000, 4000) as ctx:
                ctx.insert([[3010, 1]])
                ctx.insert([[3020, 2]])
                ctx.insert([[3030, 3]])
                ctx.insert([[3040, 4]])
                ctx.insert([[3040, 4]])  # non-monotonic after a few lines
                ctx.finalize()

        eq_(list(client.stream_intervals("/context/test")),
            [ [ 1000, 1051 ],
              [ 1070, 1080 ],
              [ 1090, 1180 ],
              [ 2000, 3000 ] ])

        client.stream_remove("/context/test")
        client.stream_destroy("/context/test")
        client.close()

    def test_numpyclient_05_emptyintervals(self):
        # Like test_client_emptyintervals, with insert_numpy_context
        client = nilmdb.client.numpyclient.NumpyClient(testurl)
        client.stream_create("/empty/test", "uint16_1")
        def info():
            result = []
            for interval in list(client.stream_intervals("/empty/test")):
                result.append((client.stream_count("/empty/test", *interval),
                               interval))
            return result
        eq_(info(), [])

        # Insert a region with just a few points
        with client.stream_insert_numpy_context("/empty/test") as ctx:
            ctx.update_start(100)
            ctx.insert([[140, 1]])
            ctx.insert([[150, 1]])
            ctx.insert([[160, 1]])
            ctx.update_end(200)
            ctx.finalize()
        eq_(info(), [(3, [100, 200])])

        # Delete chunk, which will leave one data point and two intervals
        client.stream_remove("/empty/test", 145, 175)
        eq_(info(), [(1, [100, 145]),
                     (0, [175, 200])])

        # Try also creating a completely empty interval from scratch,
        # in a few different ways.
        client.stream_insert("/empty/test", b"", 300, 350)
        client.stream_insert("/empty/test", [], 400, 450)
        with client.stream_insert_numpy_context("/empty/test", 500, 550):
            pass

        # If enough timestamps aren't provided, empty streams won't be created.
        client.stream_insert("/empty/test", [])
        with client.stream_insert_numpy_context("/empty/test"):
            pass
        client.stream_insert("/empty/test", [], start = 600)
        with client.stream_insert_numpy_context("/empty/test", start = 700):
            pass
        client.stream_insert("/empty/test", [], end = 850)
        with client.stream_insert_numpy_context("/empty/test", end = 950):
            pass

        # Equal start and end is OK as long as there's no data
        with assert_raises(ClientError) as e:
            with client.stream_insert_numpy_context("/empty/test",
                                                    start=9, end=9) as ctx:
                ctx.insert([[9, 9]])
                ctx.finalize()
        in_("have data to send, but invalid start/end times", str(e.exception))

        with client.stream_insert_numpy_context("/empty/test",
                                                start=9, end=9) as ctx:
            pass

        # reusing a context object is bad
        with assert_raises(Exception) as e:
            ctx.insert([[9, 9]])

        # Try various things that might cause problems
        with client.stream_insert_numpy_context("/empty/test",
                                                1000, 1050) as ctx:
            ctx.finalize()  # inserts [1000, 1050]
            ctx.finalize()  # nothing
            ctx.finalize()  # nothing
            ctx.insert([[1100, 1]])
            ctx.finalize()  # inserts [1100, 1101]
            ctx.update_start(1199)
            ctx.insert([[1200, 1]])
            ctx.update_end(1250)
            ctx.finalize()  # inserts [1199, 1250]
            ctx.update_start(1299)
            ctx.finalize()  # nothing
            ctx.update_end(1350)
            ctx.finalize()  # nothing
            ctx.update_start(1400)
            ctx.insert(np.zeros((0,2)))
            ctx.update_end(1450)
            ctx.finalize()
            ctx.update_start(1500)
            ctx.insert(np.zeros((0,2)))
            ctx.update_end(1550)
            ctx.finalize()
            ctx.insert(np.zeros((0,2)))
            ctx.insert(np.zeros((0,2)))
            ctx.insert(np.zeros((0,2)))
            ctx.finalize()

        # Check everything
        eq_(info(), [(1, [100, 145]),
                     (0, [175, 200]),
                     (0, [300, 350]),
                     (0, [400, 450]),
                     (0, [500, 550]),
                     (0, [1000, 1050]),
                     (1, [1100, 1101]),
                     (1, [1199, 1250]),
                     (0, [1400, 1450]),
                     (0, [1500, 1550]),
                     ])

        # Clean up
        client.stream_remove("/empty/test")
        client.stream_destroy("/empty/test")
        client.close()
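For the structured = True extractions above, the np.c_ comparisons work because column-stacking the 'timestamp' and 'data' fields reproduces the flat (rows, 1 + width) array. A small sketch of that equivalence; the field names come from the test, while the exact dtype below (int64 timestamps, float32 data) is an assumption about the float32_8 layout:

import numpy as np

# Hypothetical structured dtype for a float32_8 stream: one int64 timestamp
# plus 8 float32 data columns per row.
dt = np.dtype([('timestamp', '<i8'), ('data', '<f4', (8,))])
rec = np.zeros(3, dtype=dt)
rec['timestamp'] = [10, 20, 30]

# Column-stacking the fields recovers the flat (rows, 9) unstructured form.
flat = np.c_[rec['timestamp'], rec['data']]
assert flat.shape == (3, 9)
assert (flat[:, 0] == [10, 20, 30]).all()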
@@ -3,7 +3,7 @@ from nilmdb.utils.printf import *

 from nose.tools import *
 from nose.tools import assert_raises
-from cStringIO import StringIO
+from io import StringIO
 import sys

 from testutil.helpers import *
@@ -18,7 +18,7 @@ class TestPrintf(object):
             printf("hello, world: %d", 123)
             fprintf(test2, "hello too: %d", 123)
             test3 = sprintf("hello three: %d", 123)
-        except:
+        except Exception:
             sys.stdout = old_stdout
             raise
         sys.stdout = old_stdout
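The except: to except Exception: change above is a semantic narrowing, not just style: a bare except also catches BaseException subclasses such as KeyboardInterrupt and SystemExit, while except Exception lets them propagate. A quick illustration:

# KeyboardInterrupt derives directly from BaseException, not Exception,
# so "except Exception" does not intercept it.
assert not issubclass(KeyboardInterrupt, Exception)
assert issubclass(KeyboardInterrupt, BaseException)

try:
    raise KeyboardInterrupt
except Exception:
    handled = "swallowed"     # not reached
except BaseException:
    handled = "propagated past 'except Exception'"
assert handled == "propagated past 'except Exception'"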
@@ -36,12 +36,12 @@ class TestRBTree:
         # make a set of 100 intervals, inserted in order
         rb = RBTree()
         j = 100
-        for i in xrange(j):
+        for i in range(j):
             rb.insert(RBNode(i, i+1))
         render(rb, "in-order insert")

         # remove about half of them
-        for i in random.sample(xrange(j),j):
+        for i in random.sample(range(j),j):
             if random.randint(0,1):
                 rb.delete(rb.find(i, i+1))
         render(rb, "in-order insert, random delete")
@@ -49,18 +49,18 @@ class TestRBTree:
         # make a set of 100 intervals, inserted at random
         rb = RBTree()
         j = 100
-        for i in random.sample(xrange(j),j):
+        for i in random.sample(range(j),j):
             rb.insert(RBNode(i, i+1))
         render(rb, "random insert")

         # remove about half of them
-        for i in random.sample(xrange(j),j):
+        for i in random.sample(range(j),j):
             if random.randint(0,1):
                 rb.delete(rb.find(i, i+1))
         render(rb, "random insert, random delete")

         # in-order insert of 50 more
-        for i in xrange(50):
+        for i in range(50):
             rb.insert(RBNode(i+500, i+501))
         render(rb, "random insert, random delete, in-order insert")

@@ -6,6 +6,7 @@ from nose.tools import *
 from nose.tools import assert_raises
 import threading
 import time
+import nilmdb.server

 from testutil.helpers import *

@@ -28,6 +29,9 @@ class Foo(object):
     def t(self):
         pass

+    def reent(self, func):
+        func()
+
     def tester(self, debug = False):
         # purposely not thread-safe
         self.test_thread = threading.current_thread().name
@@ -50,7 +54,7 @@ class Base(object):
         def func(foo):
             foo.test()
         threads = []
-        for i in xrange(20):
+        for i in range(20):
             threads.append(threading.Thread(target = func, args = (self.foo,)))
         for t in threads:
             t.start()
@@ -62,6 +66,28 @@ class Base(object):
         eq_(self.foo.val, 20)
         eq_(self.foo.init_thread, self.foo.test_thread)

+class ListLike(object):
+    def __init__(self):
+        self.thread = threading.current_thread().name
+        self.foo = 0
+
+    def __iter__(self):
+        eq_(threading.current_thread().name, self.thread)
+        self.foo = 0
+        return self
+
+    def __getitem__(self, key):
+        eq_(threading.current_thread().name, self.thread)
+        return key
+
+    def __next__(self):
+        eq_(threading.current_thread().name, self.thread)
+        if self.foo < 5:
+            self.foo += 1
+            return self.foo
+        else:
+            raise StopIteration
+
 class TestUnserialized(Base):
     def setUp(self):
         self.foo = Foo()
@@ -84,3 +110,23 @@ class TestSerializer(Base):
         sp(sp(Foo("x"))).t()
         sp(sp(Foo)("x")).t()
         sp(sp(Foo))("x").t()
+
+    def test_iter(self):
+        sp = nilmdb.utils.serializer_proxy
+        i = sp(ListLike)()
+        eq_(list(i), [1,2,3,4,5])
+        eq_(i[3], 3)
+
+    def test_del(self):
+        sp = nilmdb.utils.serializer_proxy
+        foo = sp(Foo("x"))
+
+        # trigger exception in __del__, which should be ignored
+        foo._SerializerObjectProxy__call_queue = None
+        del foo
+
+    def test_rocket(self):
+        # Serializer works on a C module?
+        sp = nilmdb.utils.serializer_proxy
+        rkt = sp(nilmdb.server.rocket.Rocket("int32_8", None))
+        eq_(rkt.binary_size, 40)
@@ -76,14 +76,14 @@ class TestThreadSafety(object):
     def test(self):
         proxy = nilmdb.utils.threadsafety.verify_proxy
         self.tryit(Test(), True, True)
-        self.tryit(proxy(Test(), True, True, True), False, False)
-        self.tryit(proxy(Test(), True, True, False), False, True)
-        self.tryit(proxy(Test(), True, False, True), True, False)
-        self.tryit(proxy(Test(), True, False, False), True, True)
-        self.tryit(proxy(Test, True, True, True)(), False, False)
-        self.tryit(proxy(Test, True, True, False)(), False, True)
-        self.tryit(proxy(Test, True, False, True)(), True, False)
-        self.tryit(proxy(Test, True, False, False)(), True, True)
+        self.tryit(proxy(Test(), True, True), False, False)
+        self.tryit(proxy(Test(), True, False), False, True)
+        self.tryit(proxy(Test(), False, True), True, False)
+        self.tryit(proxy(Test(), False, False), True, True)
+        self.tryit(proxy(Test, True, True)(), False, False)
+        self.tryit(proxy(Test, True, False)(), False, True)
+        self.tryit(proxy(Test, False, True)(), True, False)
+        self.tryit(proxy(Test, False, False)(), True, True)

         proxy(proxy(proxy(Test))()).foo()

@@ -1,12 +1,12 @@
 import nilmdb
 from nilmdb.utils.printf import *
-from nilmdb.utils import datetime_tz
+import datetime_tz

 from nose.tools import *
 from nose.tools import assert_raises
 import os
 import sys
-import cStringIO
+import io

 from testutil.helpers import *

@@ -18,60 +18,62 @@ class TestTimestamper(object):

     def test_timestamper(self):
         def join(list):
-            return "\n".join(list) + "\n"
+            return b"\n".join(list) + b"\n"

-        start = datetime_tz.datetime_tz.smartparse("03/24/2012").totimestamp()
-        lines_in = [ "hello", "world", "hello world", "# commented out" ]
-        lines_out = [ "1332561600.000000 hello",
-                      "1332561600.000125 world",
-                      "1332561600.000250 hello world" ]
+        datetime_tz.localtz_set("America/New_York")
+
+        start = nilmdb.utils.time.parse_time("03/24/2012")
+        lines_in = [ b"hello", b"world", b"hello world", b"# commented out" ]
+        lines_out = [ b"1332561600000000 hello",
+                      b"1332561600000125 world",
+                      b"1332561600000250 hello world" ]

         # full
-        input = cStringIO.StringIO(join(lines_in))
+        input = io.BytesIO(join(lines_in))
         ts = timestamper.TimestamperRate(input, start, 8000)
         foo = ts.readlines()
         eq_(foo, join(lines_out))
         in_("TimestamperRate(..., start=", str(ts))

         # first 30 or so bytes means the first 2 lines
-        input = cStringIO.StringIO(join(lines_in))
+        input = io.BytesIO(join(lines_in))
         ts = timestamper.TimestamperRate(input, start, 8000)
         foo = ts.readlines(30)
         eq_(foo, join(lines_out[0:2]))

         # stop iteration early
-        input = cStringIO.StringIO(join(lines_in))
+        input = io.BytesIO(join(lines_in))
         ts = timestamper.TimestamperRate(input, start, 8000,
-                                         1332561600.000200)
+                                         1332561600000200)
-        foo = ""
+        foo = b""
         for line in ts:
             foo += line
         eq_(foo, join(lines_out[0:2]))

         # stop iteration early (readlines)
-        input = cStringIO.StringIO(join(lines_in))
+        input = io.BytesIO(join(lines_in))
         ts = timestamper.TimestamperRate(input, start, 8000,
-                                         1332561600.000200)
+                                         1332561600000200)
         foo = ts.readlines()
         eq_(foo, join(lines_out[0:2]))

         # stop iteration really early
-        input = cStringIO.StringIO(join(lines_in))
+        input = io.BytesIO(join(lines_in))
         ts = timestamper.TimestamperRate(input, start, 8000,
-                                         1332561600.000000)
+                                         1332561600000000)
         foo = ts.readlines()
-        eq_(foo, "")
+        eq_(foo, b"")

         # use iterator
-        input = cStringIO.StringIO(join(lines_in))
+        input = io.BytesIO(join(lines_in))
         ts = timestamper.TimestamperRate(input, start, 8000)
-        foo = ""
+        foo = b""
         for line in ts:
             foo += line
         eq_(foo, join(lines_out))

         # check that TimestamperNow gives similar result
-        input = cStringIO.StringIO(join(lines_in))
+        input = io.BytesIO(join(lines_in))
         ts = timestamper.TimestamperNow(input)
         foo = ts.readlines()
         ne_(foo, join(lines_out))
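The rewritten expected values above reflect nilmdb-2.0's switch from float seconds to integer microseconds for timestamps (compare 1332561600.000125 with 1332561600000125). The mapping is a plain scale-and-round; seconds_to_us below is a hypothetical helper, not part of nilmdb's API:

# Old tests used float seconds; the new expected output is integer
# microseconds since the epoch.
def seconds_to_us(t):
    return int(round(t * 1e6))

assert seconds_to_us(1332561600.000125) == 1332561600000125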
tests/test_wsgi.py (new normal file, 36 lines)
@@ -0,0 +1,36 @@
from nose.tools import *
from nose.tools import assert_raises
from testutil.helpers import *

import io
import os
import sys
import time

import nilmdb.server
import webtest

testdb = "tests/testdb"

# Test WSGI interface

class TestWSGI(object):
    def test_wsgi(self):

        # Bad database gives debug info
        app = webtest.TestApp(nilmdb.server.wsgi_application("/dev/null", "/"))
        resp = app.get('/', expect_errors=True)
        eq_(resp.status_int, 500)
        eq_(resp.content_type, "text/plain")
        body = resp.body.decode('utf-8')
        in_("Initializing database at path '/dev/null' failed", body)
        in_("Not a directory: b'/dev/null/data'", body)
        in_("Running as: uid=", body)
        in_("Environment:", body)

        # Good database works fine
        app = webtest.TestApp(nilmdb.server.wsgi_application(testdb, "/nilmdb"))
        resp = app.get('/version', expect_errors=True)
        eq_(resp.status_int, 404)
        resp = app.get('/nilmdb/version')
        eq_(resp.json, nilmdb.__version__)
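nilmdb.server.wsgi_application(dbpath, basepath), as exercised above, returns an ordinary WSGI callable, so anything that speaks WSGI can host it. A minimal sketch using the stdlib reference server; the signature is taken from the test, while the database path and port here are placeholders:

# Serving the same callable the test wraps with webtest.TestApp.
from wsgiref.simple_server import make_server
import nilmdb.server

application = nilmdb.server.wsgi_application("tests/testdb", "/nilmdb")
with make_server('127.0.0.1', 8080, application) as httpd:
    httpd.handle_request()   # serve one request, e.g. GET /nilmdb/version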
||||||
@@ -1,419 +0,0 @@
|
|||||||
|
|
||||||
#-----------------------------------------------
|
|
||||||
#aplotter.py - ascii art function plotter
|
|
||||||
#Copyright (c) 2006, Imri Goldberg
|
|
||||||
#All rights reserved.
|
|
||||||
#
|
|
||||||
#Redistribution and use in source and binary forms,
|
|
||||||
#with or without modification, are permitted provided
|
|
||||||
#that the following conditions are met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the
|
|
||||||
# above copyright notice, this list of conditions
|
|
||||||
# and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the
|
|
||||||
# above copyright notice, this list of conditions
|
|
||||||
# and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
# * Neither the name of the <ORGANIZATION> nor the names of
|
|
||||||
# its contributors may be used to endorse or promote products
|
|
||||||
# derived from this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
#AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
#IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
#ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
|
||||||
#LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
||||||
#DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
||||||
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
||||||
#OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
#-----------------------------------------------
|
|
||||||
|
|
||||||
import math
|
|
||||||
|
|
||||||
|
|
||||||
EPSILON = 0.000001
|
|
||||||
|
|
||||||
def transposed(mat):
|
|
||||||
result = []
|
|
||||||
for i in xrange(len(mat[0])):
|
|
||||||
result.append([x[i] for x in mat])
|
|
||||||
return result
|
|
||||||
|
|
||||||
def y_reversed(mat):
|
|
||||||
result = []
|
|
||||||
for i in range(len(mat)):
|
|
||||||
result.append(list(reversed(mat[i])))
|
|
||||||
return result
|
|
||||||
|
|
||||||
def sign(x):
|
|
||||||
if 0<x:
|
|
||||||
return 1
|
|
||||||
if 0 == x:
|
|
||||||
return 0
|
|
||||||
return -1
|
|
||||||
|
|
||||||
class Plotter(object):
|
|
||||||
|
|
||||||
class PlotData(object):
|
|
||||||
def __init__(self, x_size, y_size, min_x, max_x, min_y, max_y, x_mod, y_mod):
|
|
||||||
self.x_size = x_size
|
|
||||||
self.y_size = y_size
|
|
||||||
self.min_x = min_x
|
|
||||||
self.max_x = max_x
|
|
||||||
self.min_y = min_y
|
|
||||||
self.max_y = max_y
|
|
||||||
self.x_mod = x_mod
|
|
||||||
self.y_mod = y_mod
|
|
||||||
|
|
||||||
self.x_step = float(max_x - min_x)/float(self.x_size)
|
|
||||||
self.y_step = float(max_y - min_y)/float(self.y_size)
|
|
||||||
self.inv_x_step = 1/self.x_step
|
|
||||||
self.inv_y_step = 1/self.y_step
|
|
||||||
|
|
||||||
self.ratio = self.y_step / self.x_step
|
|
||||||
def __repr__(self):
|
|
||||||
s = "size: %s, bl: %s, tr: %s, step: %s" % ((self.x_size, self.y_size), (self.min_x, self.min_y), (self.max_x, self.max_y),
|
|
||||||
(self.x_step, self.y_step))
|
|
||||||
return s
|
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
|
|
||||||
self.x_size = kwargs.get("x_size", 80)
|
|
||||||
self.y_size = kwargs.get("y_size", 20)
|
|
||||||
|
|
||||||
self.will_draw_axes = kwargs.get("draw_axes", True)
|
|
||||||
|
|
||||||
self.new_line = kwargs.get("newline", "\n")
|
|
||||||
|
|
||||||
self.dot = kwargs.get("dot", "*")
|
|
||||||
|
|
||||||
self.plot_slope = kwargs.get("plot_slope", True)
|
|
||||||
|
|
||||||
self.x_margin = kwargs.get("x_margin", 0.05)
|
|
||||||
self.y_margin = kwargs.get("y_margin", 0.1)
|
|
||||||
|
|
||||||
self.will_plot_labels = kwargs.get("plot_labels", True)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_symbol_by_slope(slope, default_symbol):
|
|
||||||
draw_symbol = default_symbol
|
|
||||||
if slope > math.tan(3*math.pi/8):
|
|
||||||
draw_symbol = "|"
|
|
||||||
elif slope > math.tan(math.pi/8) and slope < math.tan(3*math.pi/8):
|
|
||||||
draw_symbol = "/"
|
|
||||||
elif abs(slope) < math.tan(math.pi/8):
|
|
||||||
draw_symbol = "-"
|
|
||||||
elif slope < math.tan(-math.pi/8) and slope > math.tan(-3*math.pi/8):
|
|
||||||
draw_symbol = "\\"
|
|
||||||
elif slope < math.tan(-3*math.pi/8):
|
|
||||||
draw_symbol = "|"
|
|
||||||
return draw_symbol
|
|
||||||
|
|
||||||
|
|
||||||
def plot_labels(self, output_buffer, plot_data):
|
|
||||||
if plot_data.y_size < 2:
|
|
||||||
return
|
|
||||||
|
|
||||||
margin_factor = 1
|
|
||||||
|
|
||||||
do_plot_x_label = True
|
|
||||||
do_plot_y_label = True
|
|
||||||
|
|
||||||
x_str = "%+g"
|
|
||||||
if plot_data.x_size < 16:
|
|
||||||
do_plot_x_label = False
|
|
||||||
elif plot_data.x_size < 23:
|
|
||||||
x_str = "%+.2g"
|
|
||||||
|
|
||||||
y_str = "%+g"
|
|
||||||
if plot_data.x_size < 8:
|
|
||||||
do_plot_y_label = False
|
|
||||||
elif plot_data.x_size < 11:
|
|
||||||
y_str = "%+.2g"
|
|
||||||
|
|
||||||
act_min_x = (plot_data.min_x + plot_data.x_mod*margin_factor)
|
|
||||||
act_max_x = (plot_data.max_x - plot_data.x_mod*margin_factor)
|
|
||||||
act_min_y = (plot_data.min_y + plot_data.y_mod*margin_factor)
|
|
||||||
act_max_y = (plot_data.max_y - plot_data.y_mod*margin_factor)
|
|
||||||
|
|
||||||
if abs(act_min_x) < 1:
|
|
||||||
min_x_str = "%+.2g" % act_min_x
|
|
||||||
else:
|
|
||||||
min_x_str = x_str % act_min_x
|
|
||||||
|
|
||||||
if abs(act_max_x) < 1:
|
|
||||||
max_x_str = "%+.2g" % act_max_x
|
|
||||||
else:
|
|
||||||
max_x_str = x_str % act_max_x
|
|
||||||
|
|
||||||
if abs(act_min_y) < 1:
|
|
||||||
min_y_str = "%+.2g" % act_min_y
|
|
||||||
else:
|
|
||||||
min_y_str = y_str % act_min_y
|
|
||||||
|
|
||||||
if abs(act_max_y) < 1:
|
|
||||||
max_y_str = "%+.2g" % act_max_y
|
|
||||||
else:
|
|
||||||
max_y_str = y_str % act_max_y
|
|
||||||
|
|
||||||
min_x_coord = self.get_coord(act_min_x,plot_data.min_x,plot_data.x_step)
|
|
||||||
max_x_coord = self.get_coord(act_max_x,plot_data.min_x,plot_data.x_step)
|
|
||||||
min_y_coord = self.get_coord(act_min_y,plot_data.min_y,plot_data.y_step)
|
|
||||||
max_y_coord = self.get_coord(act_max_y,plot_data.min_y,plot_data.y_step)
|
|
||||||
|
|
||||||
|
|
||||||
#print plot_data
|
|
||||||
|
|
||||||
y_zero_coord = self.get_coord(0, plot_data.min_y, plot_data.y_step)
|
|
||||||
|
|
||||||
#if plot_data.min_x < 0 and plot_data.max_x > 0:
|
|
||||||
x_zero_coord = self.get_coord(0, plot_data.min_x, plot_data.x_step)
|
|
||||||
#else:
|
|
||||||
|
|
||||||
#pass
|
|
||||||
|
|
||||||
output_buffer[x_zero_coord][min_y_coord] = "+"
|
|
||||||
output_buffer[x_zero_coord][max_y_coord] = "+"
|
|
||||||
output_buffer[min_x_coord][y_zero_coord] = "+"
|
|
||||||
output_buffer[max_x_coord][y_zero_coord] = "+"
|
|
||||||
|
|
||||||
if do_plot_x_label:
|
|
||||||
|
|
||||||
for i,c in enumerate(min_x_str):
|
|
||||||
output_buffer[min_x_coord+i][y_zero_coord-1] = c
|
|
||||||
for i,c in enumerate(max_x_str):
|
|
||||||
output_buffer[max_x_coord+i-len(max_x_str)][y_zero_coord-1] = c
|
|
||||||
|
|
||||||
if do_plot_y_label:
|
|
||||||
|
|
||||||
for i,c in enumerate(max_y_str):
|
|
||||||
output_buffer[x_zero_coord+i][max_y_coord] = c
|
|
||||||
for i,c in enumerate(min_y_str):
|
|
||||||
output_buffer[x_zero_coord+i][min_y_coord] = c
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def plot_data(self, xy_seq, output_buffer, plot_data):
|
|
||||||
if self.plot_slope:
|
|
||||||
xy_seq = list(xy_seq)
|
|
||||||
#sort according to the x coord
|
|
||||||
xy_seq.sort(key = lambda c: c[0])
|
|
||||||
prev_p = xy_seq[0]
|
|
||||||
e_xy_seq = enumerate(xy_seq)
|
|
||||||
e_xy_seq.next()
|
|
||||||
for i,(x,y) in e_xy_seq:
|
|
||||||
draw_symbol = self.dot
|
|
||||||
line_drawn = self.plot_line(prev_p, (x,y), output_buffer, plot_data)
|
|
||||||
prev_p = (x,y)
|
|
||||||
if not line_drawn:
|
|
||||||
if i > 0 and i < len(xy_seq)-1:
|
|
||||||
px,py = xy_seq[i-1]
|
|
||||||
nx,ny = xy_seq[i+1]
|
|
||||||
|
|
||||||
if abs(nx-px) > EPSILON:
|
|
||||||
slope = (1.0/plot_data.ratio)*(ny-py)/(nx-px)
|
|
||||||
draw_symbol = self.get_symbol_by_slope(slope, draw_symbol)
|
|
||||||
if x < plot_data.min_x or x >= plot_data.max_x or y < plot_data.min_y or y >= plot_data.max_y:
|
|
||||||
continue
|
|
||||||
|
|
||||||
x_coord = self.get_coord(x, plot_data.min_x, plot_data.x_step)
|
|
||||||
y_coord = self.get_coord(y, plot_data.min_y, plot_data.y_step)
|
|
||||||
if x_coord >= 0 and x_coord < len(output_buffer) and y_coord >= 0 and y_coord < len(output_buffer[0]):
|
|
||||||
if self.draw_axes:
|
|
||||||
if y_coord == self.get_coord(0, plot_data.min_y, plot_data.y_step) and draw_symbol == "-":
|
|
||||||
draw_symbol = "="
|
|
||||||
output_buffer[x_coord][y_coord] = draw_symbol
|
|
||||||
else:
|
|
||||||
for x,y in xy_seq:
|
|
||||||
if x < plot_data.min_x or x >= plot_data.max_x or y < plot_data.min_y or y >= plot_data.max_y:
|
|
||||||
continue
|
|
||||||
x_coord = self.get_coord(x, plot_data.min_x, plot_data.x_step)
|
|
||||||
y_coord = self.get_coord(y, plot_data.min_y, plot_data.y_step)
|
|
||||||
if x_coord >= 0 and x_coord < len(output_buffer) and y_coord > 0 and y_coord < len(output_buffer[0]):
|
|
||||||
output_buffer[x_coord][y_coord] = self.dot
|
|
||||||
|
|
||||||
|
|
||||||
    def plot_line(self, start, end, output_buffer, plot_data):
        # Map both endpoints into buffer coordinates; if they land in the
        # same cell there is nothing to draw.
        start_coord = (self.get_coord(start[0], plot_data.min_x, plot_data.x_step),
                       self.get_coord(start[1], plot_data.min_y, plot_data.y_step))
        end_coord = (self.get_coord(end[0], plot_data.min_x, plot_data.x_step),
                     self.get_coord(end[1], plot_data.min_y, plot_data.y_step))

        x0, y0 = start_coord
        x1, y1 = end_coord
        if (x0, y0) == (x1, y1):
            return True

        # Clip the segment to the visible plot rectangle; segments entirely
        # outside are skipped.
        clipped_line = clip_line(start, end,
                                 (plot_data.min_x, plot_data.min_y),
                                 (plot_data.max_x, plot_data.max_y))
        if clipped_line is None:
            return False
        start, end = clipped_line

        start_coord = (self.get_coord(start[0], plot_data.min_x, plot_data.x_step),
                       self.get_coord(start[1], plot_data.min_y, plot_data.y_step))
        end_coord = (self.get_coord(end[0], plot_data.min_x, plot_data.x_step),
                     self.get_coord(end[1], plot_data.min_y, plot_data.y_step))

        x0, y0 = start_coord
        x1, y1 = end_coord
        if (x0, y0) == (x1, y1):
            return True

        x_zero_coord = self.get_coord(0, plot_data.min_x, plot_data.x_step)
        y_zero_coord = self.get_coord(0, plot_data.min_y, plot_data.y_step)

        # Pick the drawing symbol from the aspect-corrected slope; vertical
        # segments always use "|".
        if start[0] - end[0] == 0:
            draw_symbol = "|"
        else:
            slope = (1.0 / plot_data.ratio) * (end[1] - start[1]) / (end[0] - start[0])
            draw_symbol = self.get_symbol_by_slope(slope, self.dot)
        try:
            # Step along the major axis so every column (or row) the segment
            # crosses gets exactly one symbol.
            delta = x1 - x0, y1 - y0
            if abs(delta[0]) > abs(delta[1]):
                s = sign(delta[0])
                slope = float(delta[1]) / delta[0]
                for i in range(0, abs(int(delta[0]))):
                    cur_draw_symbol = draw_symbol
                    x = i * s
                    cur_y = int(y0 + slope * x)
                    if self.will_draw_axes and cur_y == y_zero_coord and draw_symbol == "-":
                        cur_draw_symbol = "="
                    output_buffer[x0 + x][cur_y] = cur_draw_symbol
            else:
                s = sign(delta[1])
                slope = float(delta[0]) / delta[1]
                for i in range(0, abs(int(delta[1]))):
                    y = i * s
                    cur_draw_symbol = draw_symbol
                    cur_y = y0 + y
                    if self.will_draw_axes and cur_y == y_zero_coord and draw_symbol == "-":
                        cur_draw_symbol = "="
                    output_buffer[int(x0 + slope * y)][cur_y] = cur_draw_symbol
        except Exception:
            # Dump the offending segment before re-raising; clipping should
            # keep all indices inside the buffer.
            print(start, end)
            print(start_coord, end_coord)
            print(plot_data)
            raise

        return False
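    # Worked example of the stepping above (illustrative, not from the
    # source): with start_coord == (0, 0) and end_coord == (5, 2),
    # delta == (5, 2), so the loop steps along x with slope == 0.4 and
    # fills cells (0,0), (1,0), (2,0), (3,1), (4,1); the end cell (5,2)
    # is left for the segment that starts there, so shared corners are
    # not drawn twice.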
    def plot_single(self, seq, min_x=None, max_x=None, min_y=None, max_y=None):
        # A single sequence is plotted against its indices.
        return self.plot_double(range(len(seq)), seq, min_x, max_x, min_y, max_y)

    def plot_double(self, x_seq, y_seq, min_x=None, max_x=None, min_y=None, max_y=None):
        # Default any missing plot limits to the data range.
        if min_x is None:
            min_x = min(x_seq)
        if max_x is None:
            max_x = max(x_seq)
        if min_y is None:
            min_y = min(y_seq)
        if max_y is None:
            max_y = max(y_seq)

        # Avoid a zero-height value range for constant data.
        if max_y == min_y:
            max_y += 1

        # Pad the plot area by the configured margins.
        x_mod = (max_x - min_x) * self.x_margin
        y_mod = (max_y - min_y) * self.y_margin
        min_x -= x_mod
        max_x += x_mod
        min_y -= y_mod
        max_y += y_mod

        plot_data = self.PlotData(self.x_size, self.y_size,
                                  min_x, max_x, min_y, max_y, x_mod, y_mod)

        # output_buffer is indexed [x][y], one character cell per element.
        output_buffer = [[" "] * self.y_size for i in range(self.x_size)]

        if self.will_draw_axes:
            self.draw_axes(output_buffer, plot_data)

        self.plot_data(list(zip(x_seq, y_seq)), output_buffer, plot_data)

        if self.will_plot_labels:
            self.plot_labels(output_buffer, plot_data)

        # Flip and transpose the buffer so rows print top-down, then join
        # the rows into the final string; see the example below.
        trans_result = transposed(y_reversed(output_buffer))

        result = self.new_line.join(["".join(row) for row in trans_result])
        return result
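    # Illustrative example (assuming the helpers transposed and y_reversed,
    # defined elsewhere in the file, flip each column and regroup columns
    # into rows): a 3x2 buffer [["a", "b"], ["c", "d"], ["e", "f"]] renders
    # as the two lines "bdf" then "ace", so higher y values print first.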
    def draw_axes(self, output_buffer, plot_data):
        # Draw the y axis ("|"), the x axis ("-"), and their intersection
        # ("+") wherever zero falls inside the plotted range.
        draw_x = False
        draw_y = False

        if plot_data.min_x <= 0 and plot_data.max_x > 0:
            draw_y = True
            zero_x = self.get_coord(0, plot_data.min_x, plot_data.x_step)
            for y in range(plot_data.y_size):
                output_buffer[zero_x][y] = "|"

        if plot_data.min_y <= 0 and plot_data.max_y > 0:
            draw_x = True
            zero_y = self.get_coord(0, plot_data.min_y, plot_data.y_step)
            for x in range(plot_data.x_size):
                output_buffer[x][zero_y] = "-"

        if draw_x and draw_y:
            output_buffer[zero_x][zero_y] = "+"
    @staticmethod
    def get_coord(val, min_val, step):
        # Map a data value onto a 0-based buffer index.
        return int((val - min_val) / step)
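    # Example (illustrative): get_coord(0.5, -1.0, 0.25) == int(1.5 / 0.25)
    # == 6, so the value 0.5 lands in buffer column 6 of a plot whose axis
    # starts at -1.0 with 0.25 data units per character cell.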
def clip_line(line_pt_1, line_pt_2, rect_bottom_left, rect_top_right):
    # Clip the segment line_pt_1..line_pt_2 to the given rectangle using a
    # parametric (Liang-Barsky style) test; returns None when the segment
    # lies entirely outside.
    ts = [0.0, 1.0]

    # Vertical and horizontal segments are clamped directly.
    if line_pt_1[0] == line_pt_2[0]:
        return ((line_pt_1[0], max(min(line_pt_1[1], line_pt_2[1]), rect_bottom_left[1])),
                (line_pt_1[0], min(max(line_pt_1[1], line_pt_2[1]), rect_top_right[1])))
    if line_pt_1[1] == line_pt_2[1]:
        return ((max(min(line_pt_1[0], line_pt_2[0]), rect_bottom_left[0]), line_pt_1[1]),
                (min(max(line_pt_1[0], line_pt_2[0]), rect_top_right[0]), line_pt_1[1]))

    # Fast path: both endpoints already inside the rectangle.
    if (rect_bottom_left[0] <= line_pt_1[0] < rect_top_right[0] and
            rect_bottom_left[1] <= line_pt_1[1] < rect_top_right[1] and
            rect_bottom_left[0] <= line_pt_2[0] < rect_top_right[0] and
            rect_bottom_left[1] <= line_pt_2[1] < rect_top_right[1]):
        return line_pt_1, line_pt_2

    # Parameters where the infinite line crosses each rectangle edge.
    ts.append(float(rect_bottom_left[0] - line_pt_1[0]) / (line_pt_2[0] - line_pt_1[0]))
    ts.append(float(rect_top_right[0] - line_pt_1[0]) / (line_pt_2[0] - line_pt_1[0]))
    ts.append(float(rect_bottom_left[1] - line_pt_1[1]) / (line_pt_2[1] - line_pt_1[1]))
    ts.append(float(rect_top_right[1] - line_pt_1[1]) / (line_pt_2[1] - line_pt_1[1]))

    # After sorting, the middle two parameters bound the visible part.
    ts.sort()
    if ts[2] < 0 or ts[2] >= 1 or ts[3] < 0 or ts[3] >= 1:
        return None
    result = [pt_1 + t * (pt_2 - pt_1)
              for t in (ts[2], ts[3])
              for (pt_1, pt_2) in zip(line_pt_1, line_pt_2)]
    return (result[0], result[1]), (result[2], result[3])
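# Worked example (illustrative): clipping the segment (-2.0, -2.0)..(2.0, 2.0)
# to the rectangle (0.0, 0.0)..(1.0, 1.0) appends the edge parameters
# 0.5, 0.75, 0.5, 0.75, so the sorted ts is [0.0, 0.5, 0.5, 0.75, 0.75, 1.0]
# and the middle pair (0.5, 0.75) yields the clipped segment
# ((0.0, 0.0), (1.0, 1.0)).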
def plot(*args, **flags):
    # Convenience wrapper: plot(seq) plots one sequence against its indices,
    # plot(x_seq, y_seq) plots one sequence against another. Limit flags are
    # passed to the plot call; all other flags configure the Plotter.
    limit_flags_names = {"min_x", "min_y", "max_x", "max_y"}
    limit_flags = {n: flags[n] for n in limit_flags_names & set(flags)}
    setting_flags = {n: flags[n] for n in set(flags) - limit_flags_names}

    if len(args) == 1:
        p = Plotter(**setting_flags)
        print(p.plot_single(args[0], **limit_flags))
    elif len(args) == 2:
        p = Plotter(**setting_flags)
        print(p.plot_double(args[0], args[1], **limit_flags))
    else:
        raise NotImplementedError("can't draw multiple graphs yet")


__all__ = ["Plotter", "plot"]
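# A minimal usage sketch (not part of the original module). It assumes the
# Plotter constructor accepts the x_size and y_size keywords suggested by
# the attribute names above, and that the helpers the class relies on
# (sign, EPSILON, transposed, y_reversed, get_symbol_by_slope, plot_labels,
# PlotData) are defined earlier in the file.
if __name__ == "__main__":
    import math
    xs = [i / 10.0 for i in range(63)]   # roughly one period of sin(x)
    ys = [math.sin(x) for x in xs]
    plot(xs, ys, x_size=60, y_size=20)   # prints an ASCII rendering of the curve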
@@ -3,7 +3,7 @@
 import shutil, os
 
 def myrepr(x):
-    if isinstance(x, basestring):
+    if isinstance(x, str):
         return '"' + x + '"'
     else:
         return repr(x)
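The hunk above is part of the Python 3 migration: basestring was removed in
Python 3, where str is the only text type. A minimal sketch of a
version-agnostic variant (hypothetical, not code from this repository):

    try:
        _string_types = basestring  # Python 2: covers str and unicode
    except NameError:
        _string_types = str         # Python 3

    def myrepr(x):
        # Quote strings; defer to repr() for everything else.
        if isinstance(x, _string_types):
            return '"' + x + '"'
        else:
            return repr(x)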
versioneer.py: 2165 lines changed (diff suppressed because it is too large)